Importing all the necessary libraries

In [78]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import os
import cv2
import tensorflow as tf
import warnings
warnings.filterwarnings('ignore')

PART - ONE

Plant Seedling Classifier

In [2]:
import glob
# train_directory = 'C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\train'
categories = glob.glob('C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\train\\*')
for category in categories:
    for images in os.listdir(category):
        img_array = cv2.imread(os.path.join(category,images),cv2.IMREAD_COLOR)
#         print(img_array)
        plt.imshow(img_array)
        plt.show()
        break
    break
In [3]:
img_array.shape
Out[3]:
(196, 196, 3)
In [4]:
training_data = []

def create_training_data():
    categories = glob.glob('C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\train\\*')
    for category in categories:
        class_num = categories.index(category)
        for images in os.listdir(category):
            img_array = cv2.imread(os.path.join(category,images),cv2.IMREAD_COLOR)
            image_size = 196
            new_img_array = cv2.resize(img_array,(image_size,image_size))
            training_data.append([new_img_array,class_num])
            
create_training_data()
In [5]:
print(len(training_data))
4767
In [6]:
import random
random.shuffle(training_data)
image_size = 196

X = []
y = []

for features,label in training_data:
    X.append(features)
    y.append(label)

X = np.array(X).reshape(-1,image_size,image_size,3)
print('The shape of X after reshaping is:',X.shape)
The shape of X after reshaping is: (4767, 196, 196, 3)
In [7]:
import pickle

pickle_X = open("X_seedling.pickle","wb")
pickle.dump(X,pickle_X)
pickle_X.close()

pickle_Y = open("Y_seedling.pickle","wb")
pickle.dump(y,pickle_Y)
pickle_Y.close()
In [8]:
pickle_in = open("X_seedling.pickle","rb")
X = pickle.load(pickle_in)
print('The shape of X is: ',X.shape)

pickle_in = open("Y_seedling.pickle","rb")
y = pickle.load(pickle_in)
y = np.array(y).reshape(len(y),1)
print('The shape of y is: ',y.shape)
The shape of X is:  (4767, 196, 196, 3)
The shape of y is:  (4767, 1)
In [9]:
X = X.astype('float32') / 255
#Reshaping the data from 3D to 1D 
X_ = np.asarray(X).reshape(X.shape[0], X.shape[1]*X.shape[2]*X.shape[3])
y_ = y
y_ = tf.keras.utils.to_categorical(y, num_classes = 12)

Plant Seedling Classifier - Using KNN

In [10]:
# Performing train/test split
from sklearn.model_selection import train_test_split

X_train,X_test,y_train,y_test = train_test_split(X_,y,test_size=0.25,random_state=42)

print('The shape of X_train is: ',X_train.shape)
print('The shape of y_train is: ',y_train.shape)
print('The shape of X_test is: ',X_test.shape)
print('The shape of y_test is: ',y_test.shape)
The shape of X_train is:  (3575, 115248)
The shape of y_train is:  (3575, 1)
The shape of X_test is:  (1192, 115248)
The shape of y_test is:  (1192, 1)
In [11]:
from sklearn.neighbors import KNeighborsClassifier
import time

StartTime = time.time()

model_knn = KNeighborsClassifier(n_neighbors=3,weights='distance')
model_knn = model_knn.fit(X_train,y_train)
    
y_pred = model_knn.predict(X_test)
EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
############### Total Time Taken:  19 Minutes #############
In [12]:
# Classification report
from sklearn import metrics
print(metrics.classification_report(y_test,y_pred))
              precision    recall  f1-score   support

           0       0.31      0.23      0.26        71
           1       0.38      0.30      0.33        90
           2       0.74      0.17      0.28        80
           3       0.41      0.20      0.27       144
           4       0.17      0.30      0.21        53
           5       0.21      0.15      0.17       121
           6       0.34      0.37      0.36       179
           7       0.25      0.10      0.15        48
           8       0.18      0.66      0.29       121
           9       0.06      0.06      0.06        48
          10       0.41      0.27      0.32       132
          11       0.38      0.03      0.05       105

    accuracy                           0.26      1192
   macro avg       0.32      0.24      0.23      1192
weighted avg       0.33      0.26      0.25      1192

In [13]:
accuracy_knn_seedling = metrics.classification_report(y_test, y_pred).split()[-2]
accuracy_percentage_knn_seedling = float(accuracy_knn_seedling)*100
In [14]:
print('The Accuracy of seedling classifier using the K-NN model is :',accuracy_percentage_knn_seedling,'%')
The Accuracy of seedling classifier using the K-NN model is : 25.0 %

Plant Seedling Classifier - Using Deep neural Networks

In [15]:
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.utils.np_utils import to_categorical
from keras.layers import Activation, Dense
from keras import optimizers
In [16]:
from keras import regularizers
#Initialize the Artificial Neural Network Classifier
seedling_model = Sequential()

seedling_model.add(Dense(512, kernel_initializer = 'he_normal',input_shape = (X_.shape[1], )))
#Adding Activation function
seedling_model.add(Activation('relu'))

#Hidden Layer 1
#Adding first Hidden layer of 256 nodes
seedling_model.add(Dense(256, kernel_initializer = 'he_normal'))
#Adding Activation function
seedling_model.add(Activation('relu'))

#Hidden Layer 2
#Adding first Hidden layer of 128 nodes
seedling_model.add(Dense(128, kernel_initializer = 'he_normal'))
#Adding Activation function
seedling_model.add(Activation('relu'))

#Hidden Layer 3
#Adding first Hidden layer of 64 nodes
seedling_model.add(Dense(64, kernel_initializer = 'he_normal'))
#Adding Activation function
seedling_model.add(Activation('relu'))

#Hidden Layer 4
#Adding first Hidden layer of 32 nodes
seedling_model.add(Dense(32, kernel_initializer = 'he_normal'))
#Adding Activation function
seedling_model.add(Activation('relu'))

# Output Layer
#Adding output layer which is of 12 nodes (digits)
seedling_model.add(Dense(12))
#Adding Activation function
# Here, we are using softmax function because we have multiclass classsification
seedling_model.add(Activation('softmax'))

print(seedling_model.summary())
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense (Dense)                (None, 512)               59007488  
_________________________________________________________________
activation (Activation)      (None, 512)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 256)               131328    
_________________________________________________________________
activation_1 (Activation)    (None, 256)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 128)               32896     
_________________________________________________________________
activation_2 (Activation)    (None, 128)               0         
_________________________________________________________________
dense_3 (Dense)              (None, 64)                8256      
_________________________________________________________________
activation_3 (Activation)    (None, 64)                0         
_________________________________________________________________
dense_4 (Dense)              (None, 32)                2080      
_________________________________________________________________
activation_4 (Activation)    (None, 32)                0         
_________________________________________________________________
dense_5 (Dense)              (None, 12)                396       
_________________________________________________________________
activation_5 (Activation)    (None, 12)                0         
=================================================================
Total params: 59,182,444
Trainable params: 59,182,444
Non-trainable params: 0
_________________________________________________________________
None
In [17]:
# compiling the ANN classifier
adam = tf.keras.optimizers.Adam(lr=0.0001)
seedling_model.compile(optimizer = 'adam', loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [18]:
# Fitting the ANN to the Training data
history = seedling_model.fit(X_, y_,batch_size = 256, epochs = 300, verbose = 1,validation_split=0.25)
Epoch 1/300
14/14 [==============================] - 298s 21s/step - loss: 33.8859 - accuracy: 0.0945 - val_loss: 25.7937 - val_accuracy: 0.0436
Epoch 2/300
14/14 [==============================] - 17s 1s/step - loss: 18.8246 - accuracy: 0.0917 - val_loss: 25.3763 - val_accuracy: 0.0940
Epoch 3/300
14/14 [==============================] - 16s 1s/step - loss: 14.1000 - accuracy: 0.0948 - val_loss: 6.4429 - val_accuracy: 0.0940
Epoch 4/300
14/14 [==============================] - 17s 1s/step - loss: 4.6264 - accuracy: 0.1183 - val_loss: 3.3533 - val_accuracy: 0.0621
Epoch 5/300
14/14 [==============================] - 17s 1s/step - loss: 2.7827 - accuracy: 0.1382 - val_loss: 2.5208 - val_accuracy: 0.1116
Epoch 6/300
14/14 [==============================] - 18s 1s/step - loss: 2.4006 - accuracy: 0.1656 - val_loss: 2.3069 - val_accuracy: 0.1468
Epoch 7/300
14/14 [==============================] - 18s 1s/step - loss: 2.2754 - accuracy: 0.1731 - val_loss: 2.2813 - val_accuracy: 0.2383
Epoch 8/300
14/14 [==============================] - 16s 1s/step - loss: 2.2642 - accuracy: 0.2014 - val_loss: 2.2910 - val_accuracy: 0.1862
Epoch 9/300
14/14 [==============================] - 15s 1s/step - loss: 2.2662 - accuracy: 0.2157 - val_loss: 2.2313 - val_accuracy: 0.1753
Epoch 10/300
14/14 [==============================] - 15s 1s/step - loss: 2.1994 - accuracy: 0.2218 - val_loss: 2.3071 - val_accuracy: 0.1720
Epoch 11/300
14/14 [==============================] - 17s 1s/step - loss: 2.2204 - accuracy: 0.2145 - val_loss: 2.2094 - val_accuracy: 0.2156
Epoch 12/300
14/14 [==============================] - 18s 1s/step - loss: 2.1378 - accuracy: 0.2534 - val_loss: 2.2859 - val_accuracy: 0.2366
Epoch 13/300
14/14 [==============================] - 17s 1s/step - loss: 2.1670 - accuracy: 0.2515 - val_loss: 2.3612 - val_accuracy: 0.1586
Epoch 14/300
14/14 [==============================] - 17s 1s/step - loss: 2.1570 - accuracy: 0.2613 - val_loss: 2.1870 - val_accuracy: 0.2139
Epoch 15/300
14/14 [==============================] - 17s 1s/step - loss: 2.0614 - accuracy: 0.2903 - val_loss: 2.1186 - val_accuracy: 0.3112
Epoch 16/300
14/14 [==============================] - 17s 1s/step - loss: 2.0211 - accuracy: 0.3234 - val_loss: 2.0836 - val_accuracy: 0.3146
Epoch 17/300
14/14 [==============================] - 18s 1s/step - loss: 2.0121 - accuracy: 0.3203 - val_loss: 2.0644 - val_accuracy: 0.3440
Epoch 18/300
14/14 [==============================] - 18s 1s/step - loss: 2.0119 - accuracy: 0.3217 - val_loss: 2.0587 - val_accuracy: 0.3372
Epoch 19/300
14/14 [==============================] - 17s 1s/step - loss: 1.9844 - accuracy: 0.3097 - val_loss: 2.1894 - val_accuracy: 0.2584
Epoch 20/300
14/14 [==============================] - 13s 963ms/step - loss: 1.9921 - accuracy: 0.3175 - val_loss: 2.0022 - val_accuracy: 0.3188
Epoch 21/300
14/14 [==============================] - 10s 682ms/step - loss: 1.9267 - accuracy: 0.3564 - val_loss: 2.0371 - val_accuracy: 0.2458
Epoch 22/300
14/14 [==============================] - 9s 669ms/step - loss: 1.9157 - accuracy: 0.3499 - val_loss: 2.0399 - val_accuracy: 0.3339
Epoch 23/300
14/14 [==============================] - 9s 663ms/step - loss: 1.8888 - accuracy: 0.3687 - val_loss: 1.9892 - val_accuracy: 0.2727
Epoch 24/300
14/14 [==============================] - 9s 669ms/step - loss: 1.8840 - accuracy: 0.3566 - val_loss: 2.0170 - val_accuracy: 0.3112
Epoch 25/300
14/14 [==============================] - 10s 684ms/step - loss: 1.9026 - accuracy: 0.3301 - val_loss: 1.9690 - val_accuracy: 0.3238
Epoch 26/300
14/14 [==============================] - 10s 684ms/step - loss: 1.8147 - accuracy: 0.3994 - val_loss: 1.9157 - val_accuracy: 0.3649
Epoch 27/300
14/14 [==============================] - 10s 680ms/step - loss: 1.8065 - accuracy: 0.3899 - val_loss: 1.9122 - val_accuracy: 0.3742
Epoch 28/300
14/14 [==============================] - 10s 680ms/step - loss: 1.8285 - accuracy: 0.3709 - val_loss: 1.9954 - val_accuracy: 0.3356
Epoch 29/300
14/14 [==============================] - 10s 682ms/step - loss: 1.8311 - accuracy: 0.3611 - val_loss: 1.9258 - val_accuracy: 0.3473
Epoch 30/300
14/14 [==============================] - 10s 684ms/step - loss: 1.7298 - accuracy: 0.4213 - val_loss: 1.8548 - val_accuracy: 0.3792
Epoch 31/300
14/14 [==============================] - 10s 682ms/step - loss: 1.6818 - accuracy: 0.4380 - val_loss: 1.8480 - val_accuracy: 0.3624
Epoch 32/300
14/14 [==============================] - 10s 690ms/step - loss: 1.6622 - accuracy: 0.4445 - val_loss: 1.8229 - val_accuracy: 0.3977
Epoch 33/300
14/14 [==============================] - 10s 689ms/step - loss: 1.7219 - accuracy: 0.4064 - val_loss: 2.0870 - val_accuracy: 0.2475
Epoch 34/300
14/14 [==============================] - 10s 690ms/step - loss: 1.7966 - accuracy: 0.3583 - val_loss: 1.8782 - val_accuracy: 0.3658
Epoch 35/300
14/14 [==============================] - 10s 689ms/step - loss: 1.7040 - accuracy: 0.3905 - val_loss: 1.8544 - val_accuracy: 0.3666
Epoch 36/300
14/14 [==============================] - 10s 702ms/step - loss: 1.7145 - accuracy: 0.3952 - val_loss: 1.8638 - val_accuracy: 0.3532
Epoch 37/300
14/14 [==============================] - 10s 695ms/step - loss: 1.6924 - accuracy: 0.4008 - val_loss: 1.8956 - val_accuracy: 0.3490
Epoch 38/300
14/14 [==============================] - 10s 682ms/step - loss: 1.6627 - accuracy: 0.4190 - val_loss: 2.0950 - val_accuracy: 0.3205
Epoch 39/300
14/14 [==============================] - 10s 688ms/step - loss: 1.6780 - accuracy: 0.4070 - val_loss: 1.8932 - val_accuracy: 0.3456
Epoch 40/300
14/14 [==============================] - 10s 686ms/step - loss: 1.6447 - accuracy: 0.4162 - val_loss: 1.8353 - val_accuracy: 0.3725
Epoch 41/300
14/14 [==============================] - 10s 682ms/step - loss: 1.5848 - accuracy: 0.4358 - val_loss: 1.8299 - val_accuracy: 0.3641
Epoch 42/300
14/14 [==============================] - 10s 681ms/step - loss: 1.5619 - accuracy: 0.4593 - val_loss: 1.7898 - val_accuracy: 0.3624
Epoch 43/300
14/14 [==============================] - 10s 694ms/step - loss: 1.5056 - accuracy: 0.4895 - val_loss: 1.8097 - val_accuracy: 0.3691
Epoch 44/300
14/14 [==============================] - 10s 680ms/step - loss: 1.5161 - accuracy: 0.4685 - val_loss: 1.8246 - val_accuracy: 0.3515
Epoch 45/300
14/14 [==============================] - 10s 699ms/step - loss: 1.5003 - accuracy: 0.4641 - val_loss: 1.7354 - val_accuracy: 0.3909
Epoch 46/300
14/14 [==============================] - 10s 735ms/step - loss: 1.4544 - accuracy: 0.4915 - val_loss: 1.6970 - val_accuracy: 0.4060
Epoch 47/300
14/14 [==============================] - 10s 686ms/step - loss: 1.4480 - accuracy: 0.4993 - val_loss: 1.7140 - val_accuracy: 0.3968
Epoch 48/300
14/14 [==============================] - 11s 778ms/step - loss: 1.4165 - accuracy: 0.5094 - val_loss: 1.7954 - val_accuracy: 0.3616
Epoch 49/300
14/14 [==============================] - 10s 731ms/step - loss: 1.4139 - accuracy: 0.5057 - val_loss: 1.7218 - val_accuracy: 0.4086
Epoch 50/300
14/14 [==============================] - 10s 695ms/step - loss: 1.4430 - accuracy: 0.4934 - val_loss: 1.7725 - val_accuracy: 0.3834
Epoch 51/300
14/14 [==============================] - 10s 733ms/step - loss: 1.3924 - accuracy: 0.5091 - val_loss: 1.6508 - val_accuracy: 0.4228
Epoch 52/300
14/14 [==============================] - 10s 683ms/step - loss: 1.3823 - accuracy: 0.5158 - val_loss: 1.7389 - val_accuracy: 0.3842
Epoch 53/300
14/14 [==============================] - 10s 683ms/step - loss: 1.3581 - accuracy: 0.5278 - val_loss: 1.7029 - val_accuracy: 0.4069
Epoch 54/300
14/14 [==============================] - 10s 683ms/step - loss: 1.3605 - accuracy: 0.5225 - val_loss: 1.6999 - val_accuracy: 0.3926
Epoch 55/300
14/14 [==============================] - 10s 683ms/step - loss: 1.3025 - accuracy: 0.5471 - val_loss: 1.6531 - val_accuracy: 0.4245
Epoch 56/300
14/14 [==============================] - 10s 681ms/step - loss: 1.3159 - accuracy: 0.5505 - val_loss: 1.6628 - val_accuracy: 0.4203
Epoch 57/300
14/14 [==============================] - 10s 708ms/step - loss: 1.3280 - accuracy: 0.5404 - val_loss: 1.7106 - val_accuracy: 0.4094
Epoch 58/300
14/14 [==============================] - 10s 685ms/step - loss: 1.3427 - accuracy: 0.5312 - val_loss: 1.6984 - val_accuracy: 0.4102
Epoch 59/300
14/14 [==============================] - 10s 704ms/step - loss: 1.2624 - accuracy: 0.5625 - val_loss: 1.6794 - val_accuracy: 0.3859
Epoch 60/300
14/14 [==============================] - 10s 690ms/step - loss: 1.3155 - accuracy: 0.5323 - val_loss: 1.6731 - val_accuracy: 0.4178
Epoch 61/300
14/14 [==============================] - 9s 678ms/step - loss: 1.4333 - accuracy: 0.4920 - val_loss: 2.0371 - val_accuracy: 0.3515
Epoch 62/300
14/14 [==============================] - 9s 678ms/step - loss: 1.4825 - accuracy: 0.4641 - val_loss: 1.6686 - val_accuracy: 0.4211
Epoch 63/300
14/14 [==============================] - 10s 679ms/step - loss: 1.2494 - accuracy: 0.5614 - val_loss: 1.5976 - val_accuracy: 0.4404
Epoch 64/300
14/14 [==============================] - 10s 684ms/step - loss: 1.2110 - accuracy: 0.5807 - val_loss: 1.7256 - val_accuracy: 0.4094
Epoch 65/300
14/14 [==============================] - 10s 683ms/step - loss: 1.2574 - accuracy: 0.5564 - val_loss: 1.7996 - val_accuracy: 0.3725
Epoch 66/300
14/14 [==============================] - 10s 682ms/step - loss: 1.2023 - accuracy: 0.5838 - val_loss: 1.7727 - val_accuracy: 0.3876
Epoch 67/300
14/14 [==============================] - 10s 679ms/step - loss: 1.1748 - accuracy: 0.5849 - val_loss: 1.8753 - val_accuracy: 0.3876
Epoch 68/300
14/14 [==============================] - 9s 678ms/step - loss: 1.2601 - accuracy: 0.5387 - val_loss: 1.9152 - val_accuracy: 0.3674
Epoch 69/300
14/14 [==============================] - 10s 683ms/step - loss: 1.2656 - accuracy: 0.5371 - val_loss: 1.9466 - val_accuracy: 0.3540
Epoch 70/300
14/14 [==============================] - 10s 680ms/step - loss: 1.3200 - accuracy: 0.5211 - val_loss: 1.6658 - val_accuracy: 0.4186
Epoch 71/300
14/14 [==============================] - 9s 677ms/step - loss: 1.1607 - accuracy: 0.5919 - val_loss: 1.7198 - val_accuracy: 0.3943
Epoch 72/300
14/14 [==============================] - 10s 680ms/step - loss: 1.1845 - accuracy: 0.5849 - val_loss: 1.7896 - val_accuracy: 0.3758
Epoch 73/300
14/14 [==============================] - 10s 684ms/step - loss: 1.1414 - accuracy: 0.5978 - val_loss: 1.6202 - val_accuracy: 0.4513
Epoch 74/300
14/14 [==============================] - 10s 683ms/step - loss: 1.1093 - accuracy: 0.6252 - val_loss: 1.8259 - val_accuracy: 0.3784
Epoch 75/300
14/14 [==============================] - 10s 679ms/step - loss: 1.1451 - accuracy: 0.5955 - val_loss: 1.7898 - val_accuracy: 0.3909
Epoch 76/300
14/14 [==============================] - 10s 695ms/step - loss: 1.1437 - accuracy: 0.5924 - val_loss: 1.6497 - val_accuracy: 0.4295
Epoch 77/300
14/14 [==============================] - 10s 679ms/step - loss: 1.0775 - accuracy: 0.6246 - val_loss: 1.5800 - val_accuracy: 0.4564
Epoch 78/300
14/14 [==============================] - 9s 678ms/step - loss: 1.0097 - accuracy: 0.6585 - val_loss: 1.5875 - val_accuracy: 0.4421
Epoch 79/300
14/14 [==============================] - 10s 682ms/step - loss: 1.0209 - accuracy: 0.6445 - val_loss: 1.6195 - val_accuracy: 0.4388
Epoch 80/300
14/14 [==============================] - 10s 681ms/step - loss: 1.0270 - accuracy: 0.6492 - val_loss: 1.7090 - val_accuracy: 0.4320
Epoch 81/300
14/14 [==============================] - 10s 679ms/step - loss: 1.0324 - accuracy: 0.6397 - val_loss: 1.6974 - val_accuracy: 0.4153
Epoch 82/300
14/14 [==============================] - 10s 680ms/step - loss: 1.0748 - accuracy: 0.6193 - val_loss: 1.7334 - val_accuracy: 0.4320
Epoch 83/300
14/14 [==============================] - 9s 676ms/step - loss: 1.0208 - accuracy: 0.6512 - val_loss: 1.5670 - val_accuracy: 0.4589
Epoch 84/300
14/14 [==============================] - 11s 772ms/step - loss: 1.1516 - accuracy: 0.5922 - val_loss: 1.8742 - val_accuracy: 0.4077
Epoch 85/300
14/14 [==============================] - 11s 788ms/step - loss: 1.1831 - accuracy: 0.5642 - val_loss: 1.9321 - val_accuracy: 0.3935
Epoch 86/300
14/14 [==============================] - 10s 715ms/step - loss: 1.1048 - accuracy: 0.6036 - val_loss: 1.8849 - val_accuracy: 0.4052
Epoch 87/300
14/14 [==============================] - 10s 709ms/step - loss: 1.0732 - accuracy: 0.6210 - val_loss: 1.7134 - val_accuracy: 0.4044
Epoch 88/300
14/14 [==============================] - 10s 705ms/step - loss: 1.0487 - accuracy: 0.6201 - val_loss: 1.6236 - val_accuracy: 0.4455
Epoch 89/300
14/14 [==============================] - 10s 693ms/step - loss: 1.0208 - accuracy: 0.6372 - val_loss: 1.7244 - val_accuracy: 0.4186
Epoch 90/300
14/14 [==============================] - 10s 680ms/step - loss: 0.9480 - accuracy: 0.6814 - val_loss: 1.7196 - val_accuracy: 0.4346
Epoch 91/300
14/14 [==============================] - 10s 679ms/step - loss: 0.8929 - accuracy: 0.7004 - val_loss: 1.6262 - val_accuracy: 0.4522
Epoch 92/300
14/14 [==============================] - 10s 679ms/step - loss: 0.8431 - accuracy: 0.7295 - val_loss: 1.6417 - val_accuracy: 0.4513
Epoch 93/300
14/14 [==============================] - 10s 691ms/step - loss: 0.9431 - accuracy: 0.6660 - val_loss: 1.6556 - val_accuracy: 0.4354
Epoch 94/300
14/14 [==============================] - 10s 690ms/step - loss: 0.8764 - accuracy: 0.7027 - val_loss: 1.7708 - val_accuracy: 0.4329
Epoch 95/300
14/14 [==============================] - 10s 688ms/step - loss: 0.8546 - accuracy: 0.7130 - val_loss: 1.6401 - val_accuracy: 0.4589
Epoch 96/300
14/14 [==============================] - 10s 687ms/step - loss: 0.8418 - accuracy: 0.7180 - val_loss: 1.7998 - val_accuracy: 0.4027
Epoch 97/300
14/14 [==============================] - 10s 684ms/step - loss: 0.8590 - accuracy: 0.7069 - val_loss: 1.6672 - val_accuracy: 0.4430
Epoch 98/300
14/14 [==============================] - 10s 681ms/step - loss: 0.7980 - accuracy: 0.7413 - val_loss: 1.6524 - val_accuracy: 0.4471
Epoch 99/300
14/14 [==============================] - 10s 683ms/step - loss: 0.8256 - accuracy: 0.7200 - val_loss: 1.7539 - val_accuracy: 0.4362
Epoch 100/300
14/14 [==============================] - 10s 680ms/step - loss: 1.1944 - accuracy: 0.5673 - val_loss: 2.1615 - val_accuracy: 0.3591
Epoch 101/300
14/14 [==============================] - 10s 681ms/step - loss: 0.9585 - accuracy: 0.6674 - val_loss: 1.6601 - val_accuracy: 0.4589
Epoch 102/300
14/14 [==============================] - 10s 680ms/step - loss: 0.8409 - accuracy: 0.7250 - val_loss: 1.8272 - val_accuracy: 0.4203
Epoch 103/300
14/14 [==============================] - 10s 682ms/step - loss: 0.9661 - accuracy: 0.6590 - val_loss: 1.6938 - val_accuracy: 0.4547
Epoch 104/300
14/14 [==============================] - 10s 686ms/step - loss: 0.8385 - accuracy: 0.7097 - val_loss: 1.9978 - val_accuracy: 0.3977
Epoch 105/300
14/14 [==============================] - 10s 681ms/step - loss: 0.8990 - accuracy: 0.6780 - val_loss: 1.6781 - val_accuracy: 0.4572
Epoch 106/300
14/14 [==============================] - 10s 682ms/step - loss: 0.7752 - accuracy: 0.7552 - val_loss: 1.6363 - val_accuracy: 0.4631
Epoch 107/300
14/14 [==============================] - 10s 680ms/step - loss: 0.8379 - accuracy: 0.7152 - val_loss: 1.8666 - val_accuracy: 0.4262
Epoch 108/300
14/14 [==============================] - 10s 686ms/step - loss: 1.0093 - accuracy: 0.6369 - val_loss: 2.1321 - val_accuracy: 0.3817
Epoch 109/300
14/14 [==============================] - 10s 682ms/step - loss: 1.0907 - accuracy: 0.6050 - val_loss: 2.0090 - val_accuracy: 0.4161
Epoch 110/300
14/14 [==============================] - 11s 766ms/step - loss: 0.9236 - accuracy: 0.6727 - val_loss: 1.8014 - val_accuracy: 0.4471
Epoch 111/300
14/14 [==============================] - 11s 792ms/step - loss: 0.9871 - accuracy: 0.6417 - val_loss: 1.8330 - val_accuracy: 0.4270
Epoch 112/300
14/14 [==============================] - 10s 693ms/step - loss: 0.9232 - accuracy: 0.6587 - val_loss: 1.8913 - val_accuracy: 0.4312
Epoch 113/300
14/14 [==============================] - 10s 733ms/step - loss: 0.8681 - accuracy: 0.6850 - val_loss: 1.8192 - val_accuracy: 0.4337
Epoch 114/300
14/14 [==============================] - 10s 687ms/step - loss: 0.7892 - accuracy: 0.7309 - val_loss: 1.7023 - val_accuracy: 0.4572
Epoch 115/300
14/14 [==============================] - 10s 685ms/step - loss: 0.7319 - accuracy: 0.7471 - val_loss: 1.6833 - val_accuracy: 0.4690
Epoch 116/300
14/14 [==============================] - 10s 686ms/step - loss: 0.6572 - accuracy: 0.7866 - val_loss: 1.8607 - val_accuracy: 0.4404
Epoch 117/300
14/14 [==============================] - 9s 678ms/step - loss: 0.6489 - accuracy: 0.7883 - val_loss: 1.9234 - val_accuracy: 0.4404
Epoch 118/300
14/14 [==============================] - 9s 678ms/step - loss: 0.7531 - accuracy: 0.7323 - val_loss: 1.7984 - val_accuracy: 0.4404
Epoch 119/300
14/14 [==============================] - 9s 678ms/step - loss: 0.6822 - accuracy: 0.7796 - val_loss: 1.8586 - val_accuracy: 0.4228
Epoch 120/300
14/14 [==============================] - 9s 678ms/step - loss: 0.8109 - accuracy: 0.7032 - val_loss: 1.9469 - val_accuracy: 0.4153
Epoch 121/300
14/14 [==============================] - 10s 684ms/step - loss: 0.8013 - accuracy: 0.7091 - val_loss: 1.7019 - val_accuracy: 0.4681
Epoch 122/300
14/14 [==============================] - 10s 686ms/step - loss: 0.6200 - accuracy: 0.8008 - val_loss: 1.6671 - val_accuracy: 0.4773
Epoch 123/300
14/14 [==============================] - 10s 688ms/step - loss: 0.5740 - accuracy: 0.8218 - val_loss: 1.7100 - val_accuracy: 0.4698
Epoch 124/300
14/14 [==============================] - 10s 690ms/step - loss: 0.5854 - accuracy: 0.8157 - val_loss: 1.8278 - val_accuracy: 0.4681
Epoch 125/300
14/14 [==============================] - 9s 678ms/step - loss: 0.6075 - accuracy: 0.7894 - val_loss: 2.0750 - val_accuracy: 0.4237
Epoch 126/300
14/14 [==============================] - 9s 678ms/step - loss: 0.8401 - accuracy: 0.7127 - val_loss: 2.1347 - val_accuracy: 0.3851
Epoch 127/300
14/14 [==============================] - 10s 684ms/step - loss: 0.8885 - accuracy: 0.6850 - val_loss: 2.0151 - val_accuracy: 0.4346
Epoch 128/300
14/14 [==============================] - 10s 681ms/step - loss: 0.6470 - accuracy: 0.7832 - val_loss: 1.7792 - val_accuracy: 0.4664
Epoch 129/300
14/14 [==============================] - 10s 679ms/step - loss: 0.5590 - accuracy: 0.8187 - val_loss: 1.6995 - val_accuracy: 0.4883
Epoch 130/300
14/14 [==============================] - 10s 681ms/step - loss: 0.5745 - accuracy: 0.8137 - val_loss: 1.8635 - val_accuracy: 0.4606
Epoch 131/300
14/14 [==============================] - 9s 677ms/step - loss: 0.6770 - accuracy: 0.7664 - val_loss: 2.0373 - val_accuracy: 0.4539
Epoch 132/300
14/14 [==============================] - 10s 680ms/step - loss: 0.6062 - accuracy: 0.7997 - val_loss: 1.9147 - val_accuracy: 0.4782
Epoch 133/300
14/14 [==============================] - 10s 686ms/step - loss: 0.7140 - accuracy: 0.7424 - val_loss: 2.1350 - val_accuracy: 0.4086
Epoch 134/300
14/14 [==============================] - 10s 708ms/step - loss: 0.8525 - accuracy: 0.6892 - val_loss: 2.2400 - val_accuracy: 0.4413
Epoch 135/300
14/14 [==============================] - 9s 679ms/step - loss: 0.8241 - accuracy: 0.7200 - val_loss: 1.8796 - val_accuracy: 0.4564
Epoch 136/300
14/14 [==============================] - 10s 683ms/step - loss: 0.5960 - accuracy: 0.7992 - val_loss: 1.8916 - val_accuracy: 0.4690
Epoch 137/300
14/14 [==============================] - 10s 681ms/step - loss: 0.5054 - accuracy: 0.8422 - val_loss: 1.8591 - val_accuracy: 0.4681
Epoch 138/300
14/14 [==============================] - 10s 680ms/step - loss: 0.4925 - accuracy: 0.8529 - val_loss: 1.8008 - val_accuracy: 0.4841
Epoch 139/300
14/14 [==============================] - 10s 684ms/step - loss: 0.5060 - accuracy: 0.8422 - val_loss: 1.8096 - val_accuracy: 0.4958
Epoch 140/300
14/14 [==============================] - 10s 689ms/step - loss: 0.5375 - accuracy: 0.8232 - val_loss: 1.9316 - val_accuracy: 0.4530
Epoch 141/300
14/14 [==============================] - 10s 683ms/step - loss: 0.5779 - accuracy: 0.8126 - val_loss: 1.9334 - val_accuracy: 0.4757
Epoch 142/300
14/14 [==============================] - 10s 683ms/step - loss: 0.6524 - accuracy: 0.7692 - val_loss: 2.1108 - val_accuracy: 0.4438
Epoch 143/300
14/14 [==============================] - 9s 677ms/step - loss: 0.5134 - accuracy: 0.8333 - val_loss: 2.0318 - val_accuracy: 0.4631
Epoch 144/300
14/14 [==============================] - 9s 678ms/step - loss: 0.6072 - accuracy: 0.7857 - val_loss: 2.3513 - val_accuracy: 0.4027
Epoch 145/300
14/14 [==============================] - 10s 680ms/step - loss: 0.6920 - accuracy: 0.7413 - val_loss: 1.8933 - val_accuracy: 0.4824
Epoch 146/300
14/14 [==============================] - 10s 684ms/step - loss: 0.4924 - accuracy: 0.8375 - val_loss: 1.9005 - val_accuracy: 0.4698
Epoch 147/300
14/14 [==============================] - 10s 680ms/step - loss: 0.4035 - accuracy: 0.8884 - val_loss: 1.9329 - val_accuracy: 0.4824
Epoch 148/300
14/14 [==============================] - 10s 682ms/step - loss: 0.4032 - accuracy: 0.8842 - val_loss: 1.8771 - val_accuracy: 0.4824
Epoch 149/300
14/14 [==============================] - 10s 681ms/step - loss: 0.5067 - accuracy: 0.8193 - val_loss: 1.9335 - val_accuracy: 0.4706
Epoch 150/300
14/14 [==============================] - 10s 679ms/step - loss: 0.4597 - accuracy: 0.8529 - val_loss: 1.9066 - val_accuracy: 0.4983
Epoch 151/300
14/14 [==============================] - 9s 677ms/step - loss: 0.5018 - accuracy: 0.8313 - val_loss: 1.9123 - val_accuracy: 0.4799
Epoch 152/300
14/14 [==============================] - 10s 692ms/step - loss: 0.4118 - accuracy: 0.8764 - val_loss: 2.1084 - val_accuracy: 0.4622
Epoch 153/300
14/14 [==============================] - 10s 684ms/step - loss: 0.4881 - accuracy: 0.8355 - val_loss: 1.8444 - val_accuracy: 0.4799
Epoch 154/300
14/14 [==============================] - 10s 679ms/step - loss: 0.3873 - accuracy: 0.8873 - val_loss: 2.0062 - val_accuracy: 0.4790
Epoch 155/300
14/14 [==============================] - 10s 684ms/step - loss: 0.3837 - accuracy: 0.8906 - val_loss: 1.8699 - val_accuracy: 0.5042
Epoch 156/300
14/14 [==============================] - 10s 683ms/step - loss: 0.3764 - accuracy: 0.8876 - val_loss: 2.0027 - val_accuracy: 0.4664
Epoch 157/300
14/14 [==============================] - 10s 695ms/step - loss: 0.4040 - accuracy: 0.8769 - val_loss: 2.0782 - val_accuracy: 0.4874
Epoch 158/300
14/14 [==============================] - 10s 682ms/step - loss: 0.3856 - accuracy: 0.8797 - val_loss: 2.2729 - val_accuracy: 0.4337
Epoch 159/300
14/14 [==============================] - 10s 679ms/step - loss: 0.3935 - accuracy: 0.8761 - val_loss: 1.9342 - val_accuracy: 0.4933
Epoch 160/300
14/14 [==============================] - 10s 680ms/step - loss: 0.4049 - accuracy: 0.8632 - val_loss: 2.1078 - val_accuracy: 0.4631
Epoch 161/300
14/14 [==============================] - 9s 678ms/step - loss: 0.3652 - accuracy: 0.8831 - val_loss: 2.0208 - val_accuracy: 0.5109
Epoch 162/300
14/14 [==============================] - 10s 696ms/step - loss: 0.4221 - accuracy: 0.8596 - val_loss: 2.0795 - val_accuracy: 0.4723
Epoch 163/300
14/14 [==============================] - 10s 708ms/step - loss: 1.0679 - accuracy: 0.6951 - val_loss: 4.3702 - val_accuracy: 0.2399
Epoch 164/300
14/14 [==============================] - 10s 682ms/step - loss: 5.2096 - accuracy: 0.1799 - val_loss: 3.3349 - val_accuracy: 0.1711
Epoch 165/300
14/14 [==============================] - 10s 706ms/step - loss: 2.1837 - accuracy: 0.2683 - val_loss: 1.8870 - val_accuracy: 0.3440
Epoch 166/300
14/14 [==============================] - 10s 714ms/step - loss: 1.4535 - accuracy: 0.4792 - val_loss: 1.7311 - val_accuracy: 0.4354
Epoch 167/300
14/14 [==============================] - 10s 712ms/step - loss: 1.2617 - accuracy: 0.5561 - val_loss: 2.3555 - val_accuracy: 0.3096
Epoch 168/300
14/14 [==============================] - 10s 684ms/step - loss: 1.2760 - accuracy: 0.5491 - val_loss: 1.7113 - val_accuracy: 0.4262
Epoch 169/300
14/14 [==============================] - 10s 679ms/step - loss: 1.0184 - accuracy: 0.6643 - val_loss: 1.6636 - val_accuracy: 0.4304
Epoch 170/300
14/14 [==============================] - 9s 677ms/step - loss: 1.5437 - accuracy: 0.4557 - val_loss: 1.7365 - val_accuracy: 0.3733
Epoch 171/300
14/14 [==============================] - 9s 676ms/step - loss: 1.2921 - accuracy: 0.5379 - val_loss: 1.5964 - val_accuracy: 0.4262
Epoch 172/300
14/14 [==============================] - 9s 677ms/step - loss: 1.0252 - accuracy: 0.6324 - val_loss: 1.7523 - val_accuracy: 0.4237
Epoch 173/300
14/14 [==============================] - 9s 677ms/step - loss: 1.0833 - accuracy: 0.6028 - val_loss: 2.4349 - val_accuracy: 0.3079
Epoch 174/300
14/14 [==============================] - 10s 681ms/step - loss: 1.6523 - accuracy: 0.4414 - val_loss: 1.8558 - val_accuracy: 0.3591
Epoch 175/300
14/14 [==============================] - 10s 685ms/step - loss: 1.0445 - accuracy: 0.6378 - val_loss: 1.5423 - val_accuracy: 0.5000
Epoch 176/300
14/14 [==============================] - 10s 680ms/step - loss: 0.8222 - accuracy: 0.7443 - val_loss: 1.6802 - val_accuracy: 0.4732
Epoch 177/300
14/14 [==============================] - 9s 676ms/step - loss: 0.7705 - accuracy: 0.7427 - val_loss: 2.1020 - val_accuracy: 0.4077
Epoch 178/300
14/14 [==============================] - 9s 676ms/step - loss: 1.2994 - accuracy: 0.5382 - val_loss: 1.8978 - val_accuracy: 0.4144
Epoch 179/300
14/14 [==============================] - 9s 678ms/step - loss: 0.9549 - accuracy: 0.6573 - val_loss: 1.7450 - val_accuracy: 0.4354
Epoch 180/300
14/14 [==============================] - 10s 679ms/step - loss: 0.7943 - accuracy: 0.7415 - val_loss: 1.6400 - val_accuracy: 0.4883
Epoch 181/300
14/14 [==============================] - 10s 692ms/step - loss: 0.7621 - accuracy: 0.7443 - val_loss: 1.7103 - val_accuracy: 0.4891
Epoch 182/300
14/14 [==============================] - 10s 680ms/step - loss: 0.7188 - accuracy: 0.7566 - val_loss: 1.6289 - val_accuracy: 0.4824
Epoch 183/300
14/14 [==============================] - 9s 675ms/step - loss: 0.6367 - accuracy: 0.8022 - val_loss: 1.7668 - val_accuracy: 0.4740
Epoch 184/300
14/14 [==============================] - 9s 676ms/step - loss: 0.6521 - accuracy: 0.7785 - val_loss: 1.7845 - val_accuracy: 0.4807
Epoch 185/300
14/14 [==============================] - 9s 677ms/step - loss: 0.7631 - accuracy: 0.7259 - val_loss: 1.7305 - val_accuracy: 0.4790
Epoch 186/300
14/14 [==============================] - 10s 685ms/step - loss: 0.5884 - accuracy: 0.8182 - val_loss: 1.7814 - val_accuracy: 0.4715
Epoch 187/300
14/14 [==============================] - 9s 677ms/step - loss: 0.5657 - accuracy: 0.8210 - val_loss: 2.0845 - val_accuracy: 0.4388
Epoch 188/300
14/14 [==============================] - 10s 681ms/step - loss: 0.6085 - accuracy: 0.7952 - val_loss: 1.8619 - val_accuracy: 0.4799
Epoch 189/300
14/14 [==============================] - 9s 677ms/step - loss: 0.5579 - accuracy: 0.8224 - val_loss: 1.7783 - val_accuracy: 0.4958
Epoch 190/300
14/14 [==============================] - 9s 675ms/step - loss: 0.5698 - accuracy: 0.8076 - val_loss: 1.8065 - val_accuracy: 0.4799
Epoch 191/300
14/14 [==============================] - 10s 746ms/step - loss: 0.4885 - accuracy: 0.8484 - val_loss: 2.4481 - val_accuracy: 0.3935
Epoch 192/300
14/14 [==============================] - 11s 798ms/step - loss: 0.7174 - accuracy: 0.7432 - val_loss: 1.8198 - val_accuracy: 0.4690
Epoch 193/300
14/14 [==============================] - 10s 689ms/step - loss: 0.5004 - accuracy: 0.8417 - val_loss: 1.8046 - val_accuracy: 0.4748
Epoch 194/300
14/14 [==============================] - 10s 694ms/step - loss: 0.4553 - accuracy: 0.8632 - val_loss: 1.8208 - val_accuracy: 0.4950
Epoch 195/300
14/14 [==============================] - 10s 737ms/step - loss: 0.6412 - accuracy: 0.7810 - val_loss: 1.9010 - val_accuracy: 0.4664
Epoch 196/300
14/14 [==============================] - 10s 681ms/step - loss: 0.8293 - accuracy: 0.6940 - val_loss: 2.1669 - val_accuracy: 0.4253
Epoch 197/300
14/14 [==============================] - 10s 689ms/step - loss: 0.6047 - accuracy: 0.7860 - val_loss: 1.8734 - val_accuracy: 0.4639
Epoch 198/300
14/14 [==============================] - 10s 681ms/step - loss: 0.4863 - accuracy: 0.8492 - val_loss: 1.7705 - val_accuracy: 0.4933
Epoch 199/300
14/14 [==============================] - 10s 679ms/step - loss: 0.4239 - accuracy: 0.8727 - val_loss: 2.0842 - val_accuracy: 0.4614
Epoch 200/300
14/14 [==============================] - 10s 679ms/step - loss: 0.4601 - accuracy: 0.8537 - val_loss: 1.9753 - val_accuracy: 0.4824
Epoch 201/300
14/14 [==============================] - 9s 678ms/step - loss: 0.5025 - accuracy: 0.8313 - val_loss: 2.3486 - val_accuracy: 0.4379
Epoch 202/300
14/14 [==============================] - 9s 675ms/step - loss: 0.5708 - accuracy: 0.8022 - val_loss: 2.1765 - val_accuracy: 0.4656
Epoch 203/300
14/14 [==============================] - 10s 683ms/step - loss: 0.5960 - accuracy: 0.7958 - val_loss: 1.8577 - val_accuracy: 0.4950
Epoch 204/300
14/14 [==============================] - 10s 685ms/step - loss: 0.5114 - accuracy: 0.8185 - val_loss: 2.0876 - val_accuracy: 0.4698
Epoch 205/300
14/14 [==============================] - 10s 681ms/step - loss: 0.5255 - accuracy: 0.8140 - val_loss: 2.0751 - val_accuracy: 0.4866
Epoch 206/300
14/14 [==============================] - 10s 680ms/step - loss: 0.5210 - accuracy: 0.8246 - val_loss: 2.0480 - val_accuracy: 0.4547
Epoch 207/300
14/14 [==============================] - 10s 679ms/step - loss: 0.3850 - accuracy: 0.8814 - val_loss: 1.9740 - val_accuracy: 0.5042
Epoch 208/300
14/14 [==============================] - 10s 680ms/step - loss: 0.3948 - accuracy: 0.8761 - val_loss: 2.2080 - val_accuracy: 0.4748
Epoch 209/300
14/14 [==============================] - 9s 678ms/step - loss: 0.4261 - accuracy: 0.8587 - val_loss: 2.0478 - val_accuracy: 0.4933
Epoch 210/300
14/14 [==============================] - 10s 693ms/step - loss: 0.4633 - accuracy: 0.8439 - val_loss: 2.2699 - val_accuracy: 0.4329
Epoch 211/300
14/14 [==============================] - 10s 697ms/step - loss: 0.4472 - accuracy: 0.8417 - val_loss: 2.0557 - val_accuracy: 0.4664
Epoch 212/300
14/14 [==============================] - 10s 686ms/step - loss: 0.3301 - accuracy: 0.9041 - val_loss: 1.9892 - val_accuracy: 0.5059
Epoch 213/300
14/14 [==============================] - 10s 690ms/step - loss: 0.3824 - accuracy: 0.8761 - val_loss: 2.2881 - val_accuracy: 0.4656
Epoch 214/300
14/14 [==============================] - 10s 686ms/step - loss: 0.4072 - accuracy: 0.8610 - val_loss: 2.1235 - val_accuracy: 0.4824
Epoch 215/300
14/14 [==============================] - 10s 686ms/step - loss: 0.5421 - accuracy: 0.8109 - val_loss: 2.2536 - val_accuracy: 0.4622
Epoch 216/300
14/14 [==============================] - 10s 686ms/step - loss: 0.4492 - accuracy: 0.8464 - val_loss: 2.1929 - val_accuracy: 0.4908
Epoch 217/300
14/14 [==============================] - 10s 682ms/step - loss: 0.3983 - accuracy: 0.8708 - val_loss: 1.9749 - val_accuracy: 0.5008
Epoch 218/300
14/14 [==============================] - 10s 684ms/step - loss: 0.2918 - accuracy: 0.9183 - val_loss: 2.0960 - val_accuracy: 0.4975
Epoch 219/300
14/14 [==============================] - 10s 681ms/step - loss: 0.3299 - accuracy: 0.9035 - val_loss: 2.6177 - val_accuracy: 0.4421
Epoch 220/300
14/14 [==============================] - 10s 683ms/step - loss: 0.9746 - accuracy: 0.6769 - val_loss: 2.4281 - val_accuracy: 0.4094
Epoch 221/300
14/14 [==============================] - 10s 689ms/step - loss: 0.6056 - accuracy: 0.7905 - val_loss: 1.9376 - val_accuracy: 0.4950
Epoch 222/300
14/14 [==============================] - 10s 680ms/step - loss: 0.4222 - accuracy: 0.8627 - val_loss: 2.1187 - val_accuracy: 0.4706
Epoch 223/300
14/14 [==============================] - 9s 677ms/step - loss: 0.6047 - accuracy: 0.7813 - val_loss: 2.0931 - val_accuracy: 0.4807
Epoch 224/300
14/14 [==============================] - 9s 678ms/step - loss: 0.3205 - accuracy: 0.9127 - val_loss: 1.9806 - val_accuracy: 0.5017
Epoch 225/300
14/14 [==============================] - 9s 678ms/step - loss: 0.3346 - accuracy: 0.9004 - val_loss: 2.3259 - val_accuracy: 0.4698
Epoch 226/300
14/14 [==============================] - 9s 678ms/step - loss: 0.3255 - accuracy: 0.8971 - val_loss: 2.0691 - val_accuracy: 0.5000
Epoch 227/300
14/14 [==============================] - 10s 680ms/step - loss: 0.2422 - accuracy: 0.9368 - val_loss: 2.1207 - val_accuracy: 0.5025
Epoch 228/300
14/14 [==============================] - 10s 679ms/step - loss: 0.2589 - accuracy: 0.9267 - val_loss: 2.0990 - val_accuracy: 0.5076
Epoch 229/300
14/14 [==============================] - 10s 687ms/step - loss: 0.2560 - accuracy: 0.9278 - val_loss: 2.2543 - val_accuracy: 0.5034
Epoch 230/300
14/14 [==============================] - 10s 695ms/step - loss: 0.2167 - accuracy: 0.9452 - val_loss: 2.2757 - val_accuracy: 0.4849
Epoch 231/300
14/14 [==============================] - 9s 677ms/step - loss: 0.2502 - accuracy: 0.9236 - val_loss: 2.4615 - val_accuracy: 0.4715
Epoch 232/300
14/14 [==============================] - 9s 677ms/step - loss: 0.5349 - accuracy: 0.8145 - val_loss: 2.6429 - val_accuracy: 0.4505
Epoch 233/300
14/14 [==============================] - 9s 677ms/step - loss: 0.7753 - accuracy: 0.7281 - val_loss: 2.1479 - val_accuracy: 0.4748
Epoch 234/300
14/14 [==============================] - 9s 674ms/step - loss: 0.5559 - accuracy: 0.8112 - val_loss: 2.1226 - val_accuracy: 0.4572
Epoch 235/300
14/14 [==============================] - 10s 680ms/step - loss: 0.3308 - accuracy: 0.8954 - val_loss: 2.1887 - val_accuracy: 0.4874
Epoch 236/300
14/14 [==============================] - 10s 688ms/step - loss: 0.2960 - accuracy: 0.9063 - val_loss: 2.1141 - val_accuracy: 0.5076
Epoch 237/300
14/14 [==============================] - 10s 694ms/step - loss: 0.3191 - accuracy: 0.8965 - val_loss: 2.4055 - val_accuracy: 0.4740
Epoch 238/300
14/14 [==============================] - 10s 728ms/step - loss: 0.3247 - accuracy: 0.8948 - val_loss: 2.1521 - val_accuracy: 0.5008
Epoch 239/300
14/14 [==============================] - 10s 733ms/step - loss: 0.2461 - accuracy: 0.9284 - val_loss: 2.1911 - val_accuracy: 0.4933
Epoch 240/300
14/14 [==============================] - 16s 1s/step - loss: 0.2089 - accuracy: 0.9427 - val_loss: 2.3591 - val_accuracy: 0.4874
Epoch 241/300
14/14 [==============================] - 16s 1s/step - loss: 0.2535 - accuracy: 0.9245 - val_loss: 2.4546 - val_accuracy: 0.4765
Epoch 242/300
14/14 [==============================] - 16s 1s/step - loss: 0.3404 - accuracy: 0.8867 - val_loss: 2.2560 - val_accuracy: 0.4916
Epoch 243/300
14/14 [==============================] - 16s 1s/step - loss: 0.2847 - accuracy: 0.9077 - val_loss: 2.4277 - val_accuracy: 0.4656
Epoch 244/300
14/14 [==============================] - 16s 1s/step - loss: 0.3043 - accuracy: 0.8996 - val_loss: 2.5261 - val_accuracy: 0.4732
Epoch 245/300
14/14 [==============================] - 16s 1s/step - loss: 0.3046 - accuracy: 0.8985 - val_loss: 2.4367 - val_accuracy: 0.4790
Epoch 246/300
14/14 [==============================] - 16s 1s/step - loss: 0.6650 - accuracy: 0.7676 - val_loss: 3.0415 - val_accuracy: 0.3750
Epoch 247/300
14/14 [==============================] - 16s 1s/step - loss: 0.5431 - accuracy: 0.8050 - val_loss: 2.2116 - val_accuracy: 0.4765
Epoch 248/300
14/14 [==============================] - 16s 1s/step - loss: 0.2978 - accuracy: 0.9119 - val_loss: 2.2318 - val_accuracy: 0.5084
Epoch 249/300
14/14 [==============================] - 16s 1s/step - loss: 0.2539 - accuracy: 0.9234 - val_loss: 2.3423 - val_accuracy: 0.4899
Epoch 250/300
14/14 [==============================] - 16s 1s/step - loss: 0.2560 - accuracy: 0.9231 - val_loss: 2.6577 - val_accuracy: 0.4673
Epoch 251/300
14/14 [==============================] - 16s 1s/step - loss: 0.4137 - accuracy: 0.8573 - val_loss: 2.6604 - val_accuracy: 0.4681
Epoch 252/300
14/14 [==============================] - 16s 1s/step - loss: 0.3633 - accuracy: 0.8708 - val_loss: 2.6680 - val_accuracy: 0.3968
Epoch 253/300
14/14 [==============================] - 16s 1s/step - loss: 0.3608 - accuracy: 0.8780 - val_loss: 2.4844 - val_accuracy: 0.4799
Epoch 254/300
14/14 [==============================] - 16s 1s/step - loss: 0.1985 - accuracy: 0.9491 - val_loss: 2.2339 - val_accuracy: 0.5034
Epoch 255/300
14/14 [==============================] - 16s 1s/step - loss: 0.1590 - accuracy: 0.9589 - val_loss: 2.4226 - val_accuracy: 0.4866
Epoch 256/300
14/14 [==============================] - 16s 1s/step - loss: 0.1467 - accuracy: 0.9648 - val_loss: 2.3801 - val_accuracy: 0.5034
Epoch 257/300
14/14 [==============================] - 16s 1s/step - loss: 0.1478 - accuracy: 0.9625 - val_loss: 2.5183 - val_accuracy: 0.4992
Epoch 258/300
14/14 [==============================] - 16s 1s/step - loss: 0.1928 - accuracy: 0.9413 - val_loss: 2.4903 - val_accuracy: 0.4966
Epoch 259/300
14/14 [==============================] - 16s 1s/step - loss: 0.2064 - accuracy: 0.9357 - val_loss: 2.4752 - val_accuracy: 0.5034
Epoch 260/300
14/14 [==============================] - 16s 1s/step - loss: 0.1331 - accuracy: 0.9645 - val_loss: 2.4764 - val_accuracy: 0.5034
Epoch 261/300
14/14 [==============================] - 19s 1s/step - loss: 0.1580 - accuracy: 0.9550 - val_loss: 2.5814 - val_accuracy: 0.4891
Epoch 262/300
14/14 [==============================] - 17s 1s/step - loss: 0.1493 - accuracy: 0.9569 - val_loss: 2.4901 - val_accuracy: 0.5076
Epoch 263/300
14/14 [==============================] - 16s 1s/step - loss: 0.1319 - accuracy: 0.9670 - val_loss: 2.7484 - val_accuracy: 0.4849
Epoch 264/300
14/14 [==============================] - 16s 1s/step - loss: 0.2341 - accuracy: 0.9192 - val_loss: 2.7324 - val_accuracy: 0.4933
Epoch 265/300
14/14 [==============================] - 16s 1s/step - loss: 0.1616 - accuracy: 0.9538 - val_loss: 2.5182 - val_accuracy: 0.5092
Epoch 266/300
14/14 [==============================] - 17s 1s/step - loss: 0.2227 - accuracy: 0.9245 - val_loss: 2.6931 - val_accuracy: 0.5109
Epoch 267/300
14/14 [==============================] - 16s 1s/step - loss: 0.2610 - accuracy: 0.9113 - val_loss: 2.9251 - val_accuracy: 0.4606
Epoch 268/300
14/14 [==============================] - 16s 1s/step - loss: 0.2141 - accuracy: 0.9303 - val_loss: 2.6376 - val_accuracy: 0.4773
Epoch 269/300
14/14 [==============================] - 16s 1s/step - loss: 0.1411 - accuracy: 0.9611 - val_loss: 2.5849 - val_accuracy: 0.4983
Epoch 270/300
14/14 [==============================] - 16s 1s/step - loss: 0.1696 - accuracy: 0.9469 - val_loss: 3.2844 - val_accuracy: 0.4656
Epoch 271/300
14/14 [==============================] - 16s 1s/step - loss: 0.4517 - accuracy: 0.8310 - val_loss: 2.5871 - val_accuracy: 0.5000
Epoch 272/300
14/14 [==============================] - 16s 1s/step - loss: 0.2868 - accuracy: 0.8973 - val_loss: 2.4003 - val_accuracy: 0.5042
Epoch 273/300
14/14 [==============================] - 16s 1s/step - loss: 0.2215 - accuracy: 0.9239 - val_loss: 2.6729 - val_accuracy: 0.4933
Epoch 274/300
14/14 [==============================] - 16s 1s/step - loss: 0.2340 - accuracy: 0.9231 - val_loss: 2.6005 - val_accuracy: 0.5050
Epoch 275/300
14/14 [==============================] - 16s 1s/step - loss: 1.3698 - accuracy: 0.6685 - val_loss: 2.9669 - val_accuracy: 0.3716
Epoch 276/300
14/14 [==============================] - 16s 1s/step - loss: 1.9059 - accuracy: 0.4487 - val_loss: 1.7987 - val_accuracy: 0.4169
Epoch 277/300
14/14 [==============================] - 16s 1s/step - loss: 1.0004 - accuracy: 0.6274 - val_loss: 1.8304 - val_accuracy: 0.4505
Epoch 278/300
14/14 [==============================] - 16s 1s/step - loss: 0.6917 - accuracy: 0.7589 - val_loss: 1.9566 - val_accuracy: 0.4639
Epoch 279/300
14/14 [==============================] - 16s 1s/step - loss: 0.4688 - accuracy: 0.8557 - val_loss: 1.9759 - val_accuracy: 0.4849
Epoch 280/300
14/14 [==============================] - 16s 1s/step - loss: 0.9782 - accuracy: 0.6436 - val_loss: 2.3039 - val_accuracy: 0.4002
Epoch 281/300
14/14 [==============================] - 16s 1s/step - loss: 0.8137 - accuracy: 0.6957 - val_loss: 1.7043 - val_accuracy: 0.4966
Epoch 282/300
14/14 [==============================] - 16s 1s/step - loss: 0.4522 - accuracy: 0.8694 - val_loss: 1.8781 - val_accuracy: 0.4883
Epoch 283/300
14/14 [==============================] - 16s 1s/step - loss: 0.3323 - accuracy: 0.9116 - val_loss: 1.9290 - val_accuracy: 0.5034
Epoch 284/300
14/14 [==============================] - 19s 1s/step - loss: 0.2484 - accuracy: 0.9385 - val_loss: 2.0889 - val_accuracy: 0.4698
Epoch 285/300
14/14 [==============================] - 17s 1s/step - loss: 0.2481 - accuracy: 0.9354 - val_loss: 1.9933 - val_accuracy: 0.5159
Epoch 286/300
14/14 [==============================] - 16s 1s/step - loss: 0.3703 - accuracy: 0.8713 - val_loss: 2.2794 - val_accuracy: 0.4924
Epoch 287/300
14/14 [==============================] - 16s 1s/step - loss: 0.4180 - accuracy: 0.8545 - val_loss: 2.2080 - val_accuracy: 0.5109
Epoch 288/300
14/14 [==============================] - 18s 1s/step - loss: 0.5003 - accuracy: 0.8199 - val_loss: 2.1323 - val_accuracy: 0.4857
Epoch 289/300
14/14 [==============================] - 16s 1s/step - loss: 0.3212 - accuracy: 0.9027 - val_loss: 2.0699 - val_accuracy: 0.5000
Epoch 290/300
14/14 [==============================] - 16s 1s/step - loss: 0.2765 - accuracy: 0.9085 - val_loss: 2.1609 - val_accuracy: 0.5126
Epoch 291/300
14/14 [==============================] - 16s 1s/step - loss: 0.1996 - accuracy: 0.9508 - val_loss: 2.0939 - val_accuracy: 0.5092
Epoch 292/300
14/14 [==============================] - 16s 1s/step - loss: 0.1975 - accuracy: 0.9455 - val_loss: 2.3560 - val_accuracy: 0.4933
Epoch 293/300
14/14 [==============================] - 16s 1s/step - loss: 0.2405 - accuracy: 0.9250 - val_loss: 2.4983 - val_accuracy: 0.4975
Epoch 294/300
14/14 [==============================] - 16s 1s/step - loss: 0.2405 - accuracy: 0.9192 - val_loss: 2.2753 - val_accuracy: 0.5168
Epoch 295/300
14/14 [==============================] - 16s 1s/step - loss: 0.2296 - accuracy: 0.9250 - val_loss: 2.3773 - val_accuracy: 0.5176
Epoch 296/300
14/14 [==============================] - 16s 1s/step - loss: 0.2302 - accuracy: 0.9239 - val_loss: 2.4987 - val_accuracy: 0.4815
Epoch 297/300
14/14 [==============================] - 16s 1s/step - loss: 0.2212 - accuracy: 0.9278 - val_loss: 2.4633 - val_accuracy: 0.4958
Epoch 298/300
14/14 [==============================] - 16s 1s/step - loss: 0.5155 - accuracy: 0.8017 - val_loss: 2.6990 - val_accuracy: 0.4379
Epoch 299/300
14/14 [==============================] - 16s 1s/step - loss: 0.4261 - accuracy: 0.8515 - val_loss: 2.2232 - val_accuracy: 0.4891
Epoch 300/300
14/14 [==============================] - 16s 1s/step - loss: 0.2518 - accuracy: 0.9222 - val_loss: 2.2145 - val_accuracy: 0.5134
In [19]:
print('Validation accuracy using ANN for seedling classifier is : ', max(history.history['val_accuracy'])*100,'%')
Validation accuracy using ANN for seedling classifier is :  51.761746406555176 %
In [20]:
import matplotlib.pyplot as plt
# plot the loss
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.legend()
plt.show()

# plot the accuracy
plt.plot(history.history['accuracy'], label='train acc')
plt.plot(history.history['val_accuracy'], label='val acc')
plt.legend()
plt.show()
In [21]:
# save it as a pickle file
from tensorflow.keras.models import load_model
seedling_model.save('seedling_model.pkl')
WARNING:tensorflow:From C:\Users\admin\AppData\Roaming\Python\Python38\site-packages\tensorflow\python\training\tracking\tracking.py:111: Model.state_updates (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.
Instructions for updating:
This property should not be used in TensorFlow 2.0, as updates are applied automatically.
WARNING:tensorflow:From C:\Users\admin\AppData\Roaming\Python\Python38\site-packages\tensorflow\python\training\tracking\tracking.py:111: Layer.updates (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.
Instructions for updating:
This property should not be used in TensorFlow 2.0, as updates are applied automatically.
INFO:tensorflow:Assets written to: seedling_model.pkl\assets

Plant Seedling Classifier Classifier - Using Convolutional Neural Networks

In [22]:
# Deep Learning CNN model to recognize face
'''This script uses a database of images and creates CNN model on top of it to test
   if the given image is recognized correctly or not'''

'''########################## IMAGE PRE-PROCESSING for TRAINING and TESTING data ##############################'''

TrainingImagePath='C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\train'
TestingImagePath='C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\test'

from keras.preprocessing.image import ImageDataGenerator

# Defining pre-processing transformations on raw images of training data
train_datagen = ImageDataGenerator(
        rescale=1./255,
        shear_range=0.1,
        zoom_range=0.1,
        horizontal_flip=True,
        validation_split=0.25)

# Generating the Training Data
training_set = train_datagen.flow_from_directory(
        TrainingImagePath,
        target_size=(100, 100),
        batch_size=32,
        class_mode='categorical',
        subset='training')


# Generating the Testing Data
validation_set = train_datagen.flow_from_directory(
        TrainingImagePath,
        target_size=(100, 100),
        batch_size=32,
        class_mode='categorical',
        subset='validation')

# Printing class labels for each face
# test_set.class_indices
Found 3581 images belonging to 12 classes.
Found 1186 images belonging to 12 classes.
In [23]:
'''########################## IMAGE PRE-PROCESSING for TRAINING and TESTING data ##############################'''

TrainingImagePath='C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\train'
TestingImagePath='C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\test'

from keras.preprocessing.image import ImageDataGenerator

# Defining pre-processing transformations on raw images of training data
train_datagen = ImageDataGenerator(
        rescale=1./255,
        shear_range=0.1,
        zoom_range=0.1,
        horizontal_flip=True,
        validation_split=0.25)

# Generating the Training Data
training_set = train_datagen.flow_from_directory(
        TrainingImagePath,
        target_size=(100, 100),
        batch_size=32,
        class_mode='categorical',
        subset='training')


# Generating the Validation Data
validation_set = train_datagen.flow_from_directory(
        TrainingImagePath,
        target_size=(100, 100),
        batch_size=32,
        class_mode='categorical',
        subset='validation')

# Printing class labels for each plant seedling
# test_set.class_indices
Found 3581 images belonging to 12 classes.
Found 1186 images belonging to 12 classes.
In [26]:
'''#################### Creating lookup table for all plant seedlings ##############################'''
# class_indices have the numeric tag for each plant seedling
TrainClasses=training_set.class_indices

# Storing the plant seedling and the numeric tag for future reference
ResultMap={}
for plant_seedlingValue,plant_seedlingName in zip(TrainClasses.values(),TrainClasses.keys()):
    ResultMap[plant_seedlingValue]=plant_seedlingName

# Saving the plant seedling map for future reference
import pickle
with open("C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\ResultsMap.pkl", 'wb') as f:
    pickle.dump(ResultMap, f, pickle.HIGHEST_PROTOCOL)

print("Mapping of Plant Seedling and its ID",ResultMap)

# The number of neurons for the output layer is equal to the number of plant seedlings
OutputNeurons=len(ResultMap)
print('\n The Number of output neurons: ', OutputNeurons)
Mapping of Plant Seedling and its ID {0: 'Black-grass', 1: 'Charlock', 2: 'Cleavers', 3: 'Common Chickweed', 4: 'Common wheat', 5: 'Fat Hen', 6: 'Loose Silky-bent', 7: 'Maize', 8: 'Scentless Mayweed', 9: 'Shepherds Purse', 10: 'Small-flowered Cranesbill', 11: 'Sugar beet'}

 The Number of output neurons:  12
In [29]:
'''######################## Create CNN deep learning model ####################################'''
from keras.models import Sequential
from keras.layers import Convolution2D
from keras.layers import MaxPool2D
from keras.layers import Flatten
from keras.layers import Dense

'''Initializing the Convolutional Neural Network'''
seedling_classifier= Sequential()

''' STEP--1 Convolution
# Adding the first layer of CNN
# we are using the format (100,100,3) because we are using TensorFlow backend
# It means 3 matrix of size (100X100) pixels representing Red, Green and Blue components of pixels
'''
seedling_classifier.add(Convolution2D(32, kernel_size=(5, 5), strides=(1, 1), input_shape=(100,100,3), activation='relu'))

'''# STEP--2 MAX Pooling'''
seedling_classifier.add(MaxPool2D(pool_size=(2,2)))

'''############## ADDITIONAL LAYER of CONVOLUTION for better accuracy #################'''
seedling_classifier.add(Convolution2D(64, kernel_size=(5, 5), strides=(1, 1), activation='relu'))

seedling_classifier.add(MaxPool2D(pool_size=(2,2)))


'''# STEP--3 FLattening'''
seedling_classifier.add(Flatten())

'''# STEP--4 Fully Connected Neural Network'''
seedling_classifier.add(Dense(512, activation='relu'))

seedling_classifier.add(Dense(OutputNeurons, activation='softmax'))

'''# Compiling the CNN'''
#seedling_classifier.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
seedling_classifier.compile(loss='categorical_crossentropy', optimizer = 'adam', metrics=["accuracy"])
In [30]:
import time
# Measuring the time taken by the model to train
StartTime=time.time()

# Starting the model training
history = seedling_classifier.fit_generator(
                    training_set,
                    steps_per_epoch=50,validation_data=validation_set,
                    epochs=30)

EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
Epoch 1/30
50/50 [==============================] - 98s 2s/step - loss: 2.4280 - accuracy: 0.1509 - val_loss: 2.3265 - val_accuracy: 0.2251
Epoch 2/30
50/50 [==============================] - 97s 2s/step - loss: 1.9773 - accuracy: 0.3325 - val_loss: 1.6816 - val_accuracy: 0.4089
Epoch 3/30
50/50 [==============================] - 97s 2s/step - loss: 1.6252 - accuracy: 0.4356 - val_loss: 1.5214 - val_accuracy: 0.4890
Epoch 4/30
50/50 [==============================] - 99s 2s/step - loss: 1.5121 - accuracy: 0.4688 - val_loss: 1.4674 - val_accuracy: 0.4848
Epoch 5/30
50/50 [==============================] - 101s 2s/step - loss: 1.3891 - accuracy: 0.5191 - val_loss: 1.3017 - val_accuracy: 0.5523
Epoch 6/30
50/50 [==============================] - 100s 2s/step - loss: 1.2739 - accuracy: 0.5512 - val_loss: 1.1745 - val_accuracy: 0.5919
Epoch 7/30
50/50 [==============================] - 99s 2s/step - loss: 1.1493 - accuracy: 0.5900 - val_loss: 1.2206 - val_accuracy: 0.5944
Epoch 8/30
50/50 [==============================] - 97s 2s/step - loss: 1.0810 - accuracy: 0.6212 - val_loss: 1.0911 - val_accuracy: 0.6492
Epoch 9/30
50/50 [==============================] - 98s 2s/step - loss: 0.9795 - accuracy: 0.6650 - val_loss: 1.0301 - val_accuracy: 0.6560
Epoch 10/30
50/50 [==============================] - 99s 2s/step - loss: 0.9160 - accuracy: 0.6975 - val_loss: 1.0033 - val_accuracy: 0.6754
Epoch 11/30
50/50 [==============================] - 100s 2s/step - loss: 0.8519 - accuracy: 0.7138 - val_loss: 1.0031 - val_accuracy: 0.6602
Epoch 12/30
50/50 [==============================] - 96s 2s/step - loss: 0.8356 - accuracy: 0.7157 - val_loss: 0.8808 - val_accuracy: 0.7091
Epoch 13/30
50/50 [==============================] - 99s 2s/step - loss: 0.7973 - accuracy: 0.7376 - val_loss: 1.0316 - val_accuracy: 0.6509
Epoch 14/30
50/50 [==============================] - 92s 2s/step - loss: 0.7190 - accuracy: 0.7625 - val_loss: 0.8440 - val_accuracy: 0.7277
Epoch 15/30
50/50 [==============================] - 54s 1s/step - loss: 0.7179 - accuracy: 0.7600 - val_loss: 0.9407 - val_accuracy: 0.7049
Epoch 16/30
50/50 [==============================] - 52s 1s/step - loss: 0.7270 - accuracy: 0.7520 - val_loss: 0.8935 - val_accuracy: 0.7150
Epoch 17/30
50/50 [==============================] - 54s 1s/step - loss: 0.6665 - accuracy: 0.7663 - val_loss: 0.8541 - val_accuracy: 0.7336
Epoch 18/30
50/50 [==============================] - 54s 1s/step - loss: 0.6279 - accuracy: 0.7800 - val_loss: 0.8396 - val_accuracy: 0.7395
Epoch 19/30
50/50 [==============================] - 57s 1s/step - loss: 0.6527 - accuracy: 0.7833 - val_loss: 0.7782 - val_accuracy: 0.7521
Epoch 20/30
50/50 [==============================] - 54s 1s/step - loss: 0.5974 - accuracy: 0.8040 - val_loss: 0.7668 - val_accuracy: 0.7555
Epoch 21/30
50/50 [==============================] - 53s 1s/step - loss: 0.5340 - accuracy: 0.8225 - val_loss: 0.7823 - val_accuracy: 0.7673
Epoch 22/30
50/50 [==============================] - 53s 1s/step - loss: 0.4936 - accuracy: 0.8263 - val_loss: 0.8462 - val_accuracy: 0.7513
Epoch 23/30
50/50 [==============================] - 54s 1s/step - loss: 0.5477 - accuracy: 0.8012 - val_loss: 0.7637 - val_accuracy: 0.7530
Epoch 24/30
50/50 [==============================] - 54s 1s/step - loss: 0.5117 - accuracy: 0.8153 - val_loss: 0.7901 - val_accuracy: 0.7530
Epoch 25/30
50/50 [==============================] - 53s 1s/step - loss: 0.4680 - accuracy: 0.8431 - val_loss: 0.7388 - val_accuracy: 0.7690
Epoch 26/30
50/50 [==============================] - 53s 1s/step - loss: 0.4799 - accuracy: 0.8234 - val_loss: 0.8103 - val_accuracy: 0.7580
Epoch 27/30
50/50 [==============================] - 54s 1s/step - loss: 0.4264 - accuracy: 0.8431 - val_loss: 0.8114 - val_accuracy: 0.7664
Epoch 28/30
50/50 [==============================] - 54s 1s/step - loss: 0.4144 - accuracy: 0.8594 - val_loss: 0.7798 - val_accuracy: 0.7757
Epoch 29/30
50/50 [==============================] - 53s 1s/step - loss: 0.3865 - accuracy: 0.8535 - val_loss: 0.8269 - val_accuracy: 0.7395
Epoch 30/30
50/50 [==============================] - 53s 1s/step - loss: 0.3974 - accuracy: 0.8650 - val_loss: 0.7968 - val_accuracy: 0.7631
############### Total Time Taken:  38 Minutes #############
In [31]:
results = seedling_classifier.evaluate(validation_set)
print('Validation accuracy using CNN for seedling classifier is : ', results[1]*100,'%')
38/38 [==============================] - 17s 448ms/step - loss: 0.8331 - accuracy: 0.7530
Validation accuracy using CNN for seedling classifier is :  75.29510855674744 %
In [32]:
import matplotlib.pyplot as plt
# plot the loss
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.legend()
plt.show()

# plot the accuracy
plt.plot(history.history['accuracy'], label='train acc')
plt.plot(history.history['val_accuracy'], label='val acc')
plt.legend()
plt.show()
In [33]:
## Saving the model
seedling_classifier.save("C:/Users/admin/Great Learning/Computer Vision/Project/plant_seedling_classifier_cnn.pkl")
INFO:tensorflow:Assets written to: C:/Users/admin/Great Learning/Computer Vision/Project/plant_seedling_classifier_cnn.pkl\assets
INFO:tensorflow:Assets written to: C:/Users/admin/Great Learning/Computer Vision/Project/plant_seedling_classifier_cnn.pkl\assets

Plant Seedling To Be Predicted

Predict.png

We can clearly see from the accuracies that CNN perform better than DL and ML models. Hence, the CNN model has been chose as the best model here.

In [34]:
'''########################## Making single predictions ############################'''
import numpy as np
from keras.preprocessing import image

testImage='C:/Users/admin/Desktop/Great Learning/Computer Vision/Project/Predict.png'
test_image=image.load_img(testImage,target_size=(100, 100))
test_image=image.img_to_array(test_image)

test_image=np.expand_dims(test_image,axis=0)

result=seedling_classifier.predict(test_image,verbose=0)
#print(training_set.class_indices)

print('####'*10)
print('Prediction is: ',ResultMap[np.argmax(result)])
########################################
Prediction is:  Maize

------------------------------------------------------------------------------------------------------------

PART - TWO

Q) TASK: Explain in depth why CNN out performs neural networks which in turn out perform supervised learning models when it comes to image classification. Use the markdown option in Jupiter for your answer.

Answer :

Why DL Algorithms perform better than Supervised Learning Algorithms?

a) Data dependencies : The most important difference between deep learning and traditional machine learning is its performance as the scale of data increases. When the data is small, deep learning algorithms don’t perform that well. This is because deep learning algorithms need a large amount of data to understand it perfectly. On the other hand, traditional machine learning algorithms with their handcrafted rules prevail in this scenario.

b) Hardware Dependencies : Deep learning algorithms heavily depend on high-end machines, contrary to traditional machine learning algorithms, which can work on low-end machines. This is because the requirements of deep learning algorithm include GPUs which are an integral part of its working. Deep learning algorithms inherently do a large amount of matrix multiplication operations. These operations can be efficiently optimized using a GPU because GPU is built for this purpose.

c) Execution Time : Usually, a deep learning algorithm takes a long time to train. This is because there are so many parameters in a deep learning algorithm that training them takes longer than usual. State of the art deep learning algorithm ResNet takes about two weeks to train completely from scratch. Whereas machine learning comparatively takes much less time to train, ranging from a few seconds to a few hours.This is turn is completely reversed on testing time. At test time, deep learning algorithm takes much less time to run. Whereas, if you compare it with k-nearest neighbors (a type of machine learning algorithm), test time increases on increasing the size of data. Although this is not applicable on all machine learning algorithms, as some of them have small testing times too.

Why CNN is better than ANN for image classification?

a) Converting images to a vector : While solving an image classification problem using ANN, the first step is to convert a 2-dimensional image into a 1-dimensional vector prior to training the model. This has two drawbacks:

i) The number of trainable parameters increases drastically with an increase in the size of the image.

ii) ANN loses the spatial features of an image. Spatial features refer to the arrangement of the pixels in an image.

------------------------------------------------------------------------------------------------------------

PART - THREE

Generation of Image data for 15 car images given

In [35]:
import os
from keras.preprocessing.image import ImageDataGenerator, array_to_img, img_to_array, load_img
from glob import glob

datagen = ImageDataGenerator(
        rotation_range=38,
        width_shift_range=0.32,
        height_shift_range=0.2,
        shear_range=0.23,
        zoom_range=0.18,
        horizontal_flip=True,
        fill_mode='nearest')

img = load_img('C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\Cars Images\\00001.jpg')  # this is a PIL image
x = img_to_array(img)  # this is a Numpy array with shape (400,600,3)
print(x.shape)
x = x.reshape((1,) + x.shape)  # this is a Numpy array with shape (1, 3, 400, 600)

images = glob('C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\Cars Images\\*')
Images = []
for i in images:
    Images.append(i[-9:])
# print(Images)
car_names = []
for i in Images:
    car_names.append(i[:5])
print(car_names)

import time
StartTime = time.time()

for i in car_names:
    img = load_img('C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\Cars Images\\' + i + '.jpg')  # this is a PIL image
    x = img_to_array(img)  # this is a Numpy array with shape (400,600,3)
#     print(x.shape)
    x = x.reshape((1,) + x.shape)
    directory = i
    parent_dir = "C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\Cars Images"
    path = os.path.join(parent_dir, directory) 
    os.mkdir(path) 

    # the .flow() command below generates batches of randomly transformed images
    # and saves the results to the `preview/` directory
    j = 0
    for batch in datagen.flow(x, batch_size=1,
                              save_to_dir=path, save_prefix=j, save_format='jpeg'):
        j += 1
        if j > 9:
            break  # otherwise the generator would loop indefinitely
            
EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
(400, 600, 3)
['00001', '00002', '00003', '00004', '00005', '00006', '00007', '00008', '00009', '00010', '00011', '00012', '00013', '00014', '00015']
############### Total Time Taken:  0 Minutes #############

Files created for each of the car types, that contain the transformations implemented.

car-directories.png

In [101]:
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from IPython.display import Image

cars_names = ['00001', '00002', '00003', '00004', '00005', '00006', '00007', '00008', '00009', '00010', '00011', '00012', '00013', '00014', '00015']
for i in cars_names:
        img_dir = 'C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\Cars Images\\' + i
        images = glob(img_dir + '\\*')
        for image in images:
            print('Car : ',i)
            img = mpimg.imread(image)
            plt.imshow(img)
            plt.show()
        print("#######################################################################################################################")
        
Car :  00001
Car :  00001
Car :  00001
Car :  00001
Car :  00001
Car :  00001
Car :  00001
Car :  00001
Car :  00001
Car :  00001
#######################################################################################################################
Car :  00002
Car :  00002
Car :  00002
Car :  00002
Car :  00002
Car :  00002
Car :  00002
Car :  00002
Car :  00002
Car :  00002
#######################################################################################################################
Car :  00003
Car :  00003
Car :  00003
Car :  00003
Car :  00003
Car :  00003
Car :  00003
Car :  00003
Car :  00003
Car :  00003
#######################################################################################################################
Car :  00004
Car :  00004
Car :  00004
Car :  00004
Car :  00004
Car :  00004
Car :  00004
Car :  00004
Car :  00004
Car :  00004
#######################################################################################################################
Car :  00005
Car :  00005
Car :  00005
Car :  00005
Car :  00005
Car :  00005
Car :  00005
Car :  00005
Car :  00005
Car :  00005
#######################################################################################################################
Car :  00006
Car :  00006
Car :  00006
Car :  00006
Car :  00006
Car :  00006
Car :  00006
Car :  00006
Car :  00006
Car :  00006
#######################################################################################################################
Car :  00007
Car :  00007
Car :  00007
Car :  00007
Car :  00007
Car :  00007
Car :  00007
Car :  00007
Car :  00007
Car :  00007
#######################################################################################################################
Car :  00008
Car :  00008
Car :  00008
Car :  00008
Car :  00008
Car :  00008
Car :  00008
Car :  00008
Car :  00008
Car :  00008
#######################################################################################################################
Car :  00009
Car :  00009
Car :  00009
Car :  00009
Car :  00009
Car :  00009
Car :  00009
Car :  00009
Car :  00009
Car :  00009
#######################################################################################################################
Car :  00010
Car :  00010
Car :  00010
Car :  00010
Car :  00010
Car :  00010
Car :  00010
Car :  00010
Car :  00010
Car :  00010
#######################################################################################################################
Car :  00011
Car :  00011
Car :  00011
Car :  00011
Car :  00011
Car :  00011
Car :  00011
Car :  00011
Car :  00011
Car :  00011
#######################################################################################################################
Car :  00012
Car :  00012
Car :  00012
Car :  00012
Car :  00012
Car :  00012
Car :  00012
Car :  00012
Car :  00012
Car :  00012
#######################################################################################################################
Car :  00013
Car :  00013
Car :  00013
Car :  00013
Car :  00013
Car :  00013
Car :  00013
Car :  00013
Car :  00013
Car :  00013
#######################################################################################################################
Car :  00014
Car :  00014
Car :  00014
Car :  00014
Car :  00014
Car :  00014
Car :  00014
Car :  00014
Car :  00014
Car :  00014
#######################################################################################################################
Car :  00015
Car :  00015
Car :  00015
Car :  00015
Car :  00015
Car :  00015
Car :  00015
Car :  00015
Car :  00015
Car :  00015
#######################################################################################################################
In [100]:
image
Out[100]:
'C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\Cars Images\\00004\\0_0_1416.jpeg'

------------------------------------------------------------------------------------------------------------

PART - FOUR

Flower Classifier

In [36]:
import tflearn.datasets.oxflower17 as oxflower17 
WARNING:tensorflow:From C:\Users\admin\AppData\Roaming\Python\Python38\site-packages\tensorflow\python\compat\v2_compat.py:96: disable_resource_variables (from tensorflow.python.ops.variable_scope) is deprecated and will be removed in a future version.
Instructions for updating:
non-resource variables are not supported in the long term
WARNING:tensorflow:From C:\Users\admin\AppData\Roaming\Python\Python38\site-packages\tensorflow\python\compat\v2_compat.py:96: disable_resource_variables (from tensorflow.python.ops.variable_scope) is deprecated and will be removed in a future version.
Instructions for updating:
non-resource variables are not supported in the long term
curses is not supported on this machine (please install/reinstall curses for an optimal experience)
In [37]:
X, y = oxflower17.load_data(one_hot=True)
In [38]:
X.shape
Out[38]:
(1360, 224, 224, 3)
In [39]:
X = np.asarray(X).reshape(X.shape[0], X.shape[1]*X.shape[2]*X.shape[3])
In [40]:
X.shape
Out[40]:
(1360, 150528)
In [41]:
import pickle

pickle_X = open("X_flowers.pickle","wb")
pickle.dump(X,pickle_X)
pickle_X.close()

pickle_Y = open("Y_flowers.pickle","wb")
pickle.dump(y,pickle_Y)
pickle_Y.close()
In [3]:
import pickle
pickle_in = open("X_flowers.pickle","rb")
X = pickle.load(pickle_in)
print('The shape of X is: ',X.shape)

pickle_in = open("Y_flowers.pickle","rb")
y = pickle.load(pickle_in)
print('The shape of y is: ',y.shape)
The shape of X is:  (1360, 150528)
The shape of y is:  (1360, 17)

Flower Classifier - Using KNN

In [43]:
# Splitting the data into train/test
from sklearn.model_selection import train_test_split

X_train,X_test,y_train,y_test = train_test_split(X,y,test_size=0.25,random_state=42)

print('The shape of X_train is: ',X_train.shape)
print('The shape of y_train is: ',y_train.shape)
print('The shape of X_test is: ',X_test.shape)
print('The shape of y_test is: ',y_test.shape)
The shape of X_train is:  (1020, 150528)
The shape of y_train is:  (1020, 17)
The shape of X_test is:  (340, 150528)
The shape of y_test is:  (340, 17)
In [44]:
from sklearn.neighbors import KNeighborsClassifier
import time

StartTime = time.time()

model_knn_flower = KNeighborsClassifier(n_neighbors=3,weights='distance')
model_knn_flower = model_knn_flower.fit(X_train,y_train)
    
y_pred = model_knn_flower.predict(X_test)
EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
############### Total Time Taken:  3 Minutes #############
In [47]:
# Classification Report
from sklearn import metrics
print(metrics.classification_report(y_test,y_pred))
              precision    recall  f1-score   support

           0       0.56      0.25      0.34        20
           1       0.25      0.05      0.08        21
           2       0.94      0.59      0.73        27
           3       1.00      0.17      0.30        23
           4       0.29      0.40      0.33        10
           5       0.21      0.24      0.22        21
           6       0.24      0.33      0.28        15
           7       0.80      0.36      0.50        22
           8       1.00      0.22      0.36        18
           9       0.25      0.08      0.12        13
          10       0.25      0.04      0.06        27
          11       0.00      0.00      0.00        21
          12       0.71      0.62      0.67        16
          13       0.33      0.05      0.09        19
          14       0.00      0.00      0.00        22
          15       0.45      0.35      0.39        26
          16       1.00      0.21      0.35        19

   micro avg       0.45      0.23      0.30       340
   macro avg       0.49      0.23      0.28       340
weighted avg       0.50      0.23      0.29       340
 samples avg       0.23      0.23      0.23       340

In [48]:
accuracy_knn_flower = metrics.classification_report(y_test, y_pred).split()[-2]
accuracy_percentage_knn_flower = float(accuracy_knn_flower)*100
In [49]:
print('The Accuracy of the flower classifier using K-NN model is :',accuracy_percentage_knn_flower,'%')
The Accuracy of the flower classifier using K-NN model is : 23.0 %

Flower Classifier - Using Deep neural Networks

In [1]:
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.utils.np_utils import to_categorical
from keras.layers import Activation, Dense
# from keras.layers import BatchNormalization, Dropout
from keras import optimizers
In [17]:
from keras import regularizers
#Initialize the Artificial Neural Network Classifier
flowers_model = Sequential()

flowers_model.add(Dense(128, kernel_initializer = 'he_normal',input_shape = (X.shape[1], )))
#Adding Activation function
flowers_model.add(Activation('relu'))

#Hidden Layer 1
#Adding first Hidden layer of 64 nodes
flowers_model.add(Dense(64, kernel_initializer = 'he_normal'))
#Adding Activation function
flowers_model.add(Activation('relu'))

#Hidden Layer 2
#Adding first Hidden layer of 32 nodes
flowers_model.add(Dense(32, kernel_initializer = 'he_normal'))
#Adding Activation function
flowers_model.add(Activation('relu'))

# Output Layer
#Adding output layer which is of 17 nodes (digits)
flowers_model.add(Dense(17))
#Adding Activation function
# Here, we are using softmax function because we have multiclass classsification
flowers_model.add(Activation('softmax'))

print(flowers_model.summary())
Model: "sequential_3"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_11 (Dense)             (None, 128)               19267712  
_________________________________________________________________
activation_11 (Activation)   (None, 128)               0         
_________________________________________________________________
dense_12 (Dense)             (None, 64)                8256      
_________________________________________________________________
activation_12 (Activation)   (None, 64)                0         
_________________________________________________________________
dense_13 (Dense)             (None, 32)                2080      
_________________________________________________________________
activation_13 (Activation)   (None, 32)                0         
_________________________________________________________________
dense_14 (Dense)             (None, 17)                561       
_________________________________________________________________
activation_14 (Activation)   (None, 17)                0         
=================================================================
Total params: 19,278,609
Trainable params: 19,278,609
Non-trainable params: 0
_________________________________________________________________
None
In [18]:
# compiling the ANN classifier
adam = tf.keras.optimizers.Adam(lr=0.0001)
flowers_model.compile(optimizer = 'adam', loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [19]:
import time
# Measuring the time taken by the model to train
StartTime=time.time()
# Fitting the ANN to the Training data
history = flowers_model.fit(X, y,batch_size = 64, epochs = 300, verbose = 1,validation_split=0.25)
EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
Epoch 1/300
16/16 [==============================] - 3s 167ms/step - loss: 29.4154 - accuracy: 0.0598 - val_loss: 19.0167 - val_accuracy: 0.0529
Epoch 2/300
16/16 [==============================] - 2s 138ms/step - loss: 8.4842 - accuracy: 0.0951 - val_loss: 2.9363 - val_accuracy: 0.0647
Epoch 3/300
16/16 [==============================] - 2s 134ms/step - loss: 2.7863 - accuracy: 0.0922 - val_loss: 2.7441 - val_accuracy: 0.0500
Epoch 4/300
16/16 [==============================] - 2s 132ms/step - loss: 2.7691 - accuracy: 0.0618 - val_loss: 2.7714 - val_accuracy: 0.0882
Epoch 5/300
16/16 [==============================] - 2s 133ms/step - loss: 2.7477 - accuracy: 0.0902 - val_loss: 2.8473 - val_accuracy: 0.1206
Epoch 6/300
16/16 [==============================] - 2s 133ms/step - loss: 2.7438 - accuracy: 0.0971 - val_loss: 2.7890 - val_accuracy: 0.0500
Epoch 7/300
16/16 [==============================] - 2s 136ms/step - loss: 2.7399 - accuracy: 0.0922 - val_loss: 2.7470 - val_accuracy: 0.0912
Epoch 8/300
16/16 [==============================] - 2s 139ms/step - loss: 2.7337 - accuracy: 0.1000 - val_loss: 2.7545 - val_accuracy: 0.0647
Epoch 9/300
16/16 [==============================] - 2s 137ms/step - loss: 2.6918 - accuracy: 0.0941 - val_loss: 2.7572 - val_accuracy: 0.0676
Epoch 10/300
16/16 [==============================] - 2s 138ms/step - loss: 2.6472 - accuracy: 0.0971 - val_loss: 2.7097 - val_accuracy: 0.0706
Epoch 11/300
16/16 [==============================] - 2s 136ms/step - loss: 2.6044 - accuracy: 0.1039 - val_loss: 2.7264 - val_accuracy: 0.1000
Epoch 12/300
16/16 [==============================] - 2s 137ms/step - loss: 2.6273 - accuracy: 0.1088 - val_loss: 2.7021 - val_accuracy: 0.1000
Epoch 13/300
16/16 [==============================] - 2s 133ms/step - loss: 2.6162 - accuracy: 0.1147 - val_loss: 2.7184 - val_accuracy: 0.1118
Epoch 14/300
16/16 [==============================] - 2s 135ms/step - loss: 2.5784 - accuracy: 0.1255 - val_loss: 2.6874 - val_accuracy: 0.1088
Epoch 15/300
16/16 [==============================] - 2s 133ms/step - loss: 2.5490 - accuracy: 0.1559 - val_loss: 2.6533 - val_accuracy: 0.1118
Epoch 16/300
16/16 [==============================] - 2s 137ms/step - loss: 2.5034 - accuracy: 0.1725 - val_loss: 2.7075 - val_accuracy: 0.0912
Epoch 17/300
16/16 [==============================] - 2s 135ms/step - loss: 2.4888 - accuracy: 0.1775 - val_loss: 2.6763 - val_accuracy: 0.1235
Epoch 18/300
16/16 [==============================] - 2s 138ms/step - loss: 2.4978 - accuracy: 0.1755 - val_loss: 2.6637 - val_accuracy: 0.1147
Epoch 19/300
16/16 [==============================] - 2s 134ms/step - loss: 2.5607 - accuracy: 0.1569 - val_loss: 2.7135 - val_accuracy: 0.1059
Epoch 20/300
16/16 [==============================] - 2s 135ms/step - loss: 2.5164 - accuracy: 0.1716 - val_loss: 2.7392 - val_accuracy: 0.0824
Epoch 21/300
16/16 [==============================] - 2s 133ms/step - loss: 2.5046 - accuracy: 0.1627 - val_loss: 2.6710 - val_accuracy: 0.1412
Epoch 22/300
16/16 [==============================] - 2s 133ms/step - loss: 2.3691 - accuracy: 0.1931 - val_loss: 2.7190 - val_accuracy: 0.1382
Epoch 23/300
16/16 [==============================] - 2s 137ms/step - loss: 2.2757 - accuracy: 0.2304 - val_loss: 2.6842 - val_accuracy: 0.1647
Epoch 24/300
16/16 [==============================] - 2s 136ms/step - loss: 2.4122 - accuracy: 0.2000 - val_loss: 2.7818 - val_accuracy: 0.1382
Epoch 25/300
16/16 [==============================] - 2s 133ms/step - loss: 2.4150 - accuracy: 0.1657 - val_loss: 2.7156 - val_accuracy: 0.1471
Epoch 26/300
16/16 [==============================] - 2s 137ms/step - loss: 2.2561 - accuracy: 0.2333 - val_loss: 2.7463 - val_accuracy: 0.1265
Epoch 27/300
16/16 [==============================] - 2s 134ms/step - loss: 2.2248 - accuracy: 0.2422 - val_loss: 2.7270 - val_accuracy: 0.1324
Epoch 28/300
16/16 [==============================] - 2s 133ms/step - loss: 2.2507 - accuracy: 0.2157 - val_loss: 2.6779 - val_accuracy: 0.1147
Epoch 29/300
16/16 [==============================] - 2s 143ms/step - loss: 2.1896 - accuracy: 0.2324 - val_loss: 2.7498 - val_accuracy: 0.1559
Epoch 30/300
16/16 [==============================] - 2s 141ms/step - loss: 2.1281 - accuracy: 0.2765 - val_loss: 3.0671 - val_accuracy: 0.1265
Epoch 31/300
16/16 [==============================] - 2s 135ms/step - loss: 2.1421 - accuracy: 0.2745 - val_loss: 2.8823 - val_accuracy: 0.1824
Epoch 32/300
16/16 [==============================] - 2s 131ms/step - loss: 2.2840 - accuracy: 0.2382 - val_loss: 2.6968 - val_accuracy: 0.1471
Epoch 33/300
16/16 [==============================] - 2s 133ms/step - loss: 2.2316 - accuracy: 0.2088 - val_loss: 2.8661 - val_accuracy: 0.1265
Epoch 34/300
16/16 [==============================] - 2s 136ms/step - loss: 2.1506 - accuracy: 0.2412 - val_loss: 2.7138 - val_accuracy: 0.1265
Epoch 35/300
16/16 [==============================] - 2s 137ms/step - loss: 2.2561 - accuracy: 0.2333 - val_loss: 2.7974 - val_accuracy: 0.1265
Epoch 36/300
16/16 [==============================] - 2s 134ms/step - loss: 2.1504 - accuracy: 0.2333 - val_loss: 2.8557 - val_accuracy: 0.1471
Epoch 37/300
16/16 [==============================] - 2s 138ms/step - loss: 2.1434 - accuracy: 0.2520 - val_loss: 2.8147 - val_accuracy: 0.1324
Epoch 38/300
16/16 [==============================] - 2s 137ms/step - loss: 2.0600 - accuracy: 0.2627 - val_loss: 2.6455 - val_accuracy: 0.1912
Epoch 39/300
16/16 [==============================] - 2s 138ms/step - loss: 1.9609 - accuracy: 0.3304 - val_loss: 2.7619 - val_accuracy: 0.2147
Epoch 40/300
16/16 [==============================] - 2s 137ms/step - loss: 2.0203 - accuracy: 0.2892 - val_loss: 2.7804 - val_accuracy: 0.1882
Epoch 41/300
16/16 [==============================] - 2s 135ms/step - loss: 1.8661 - accuracy: 0.3569 - val_loss: 3.0531 - val_accuracy: 0.2088
Epoch 42/300
16/16 [==============================] - 2s 135ms/step - loss: 1.9191 - accuracy: 0.3500 - val_loss: 3.0395 - val_accuracy: 0.1588
Epoch 43/300
16/16 [==============================] - 2s 136ms/step - loss: 2.1404 - accuracy: 0.2402 - val_loss: 2.8537 - val_accuracy: 0.1500
Epoch 44/300
16/16 [==============================] - 2s 135ms/step - loss: 1.9827 - accuracy: 0.2873 - val_loss: 2.9081 - val_accuracy: 0.1676
Epoch 45/300
16/16 [==============================] - 2s 136ms/step - loss: 1.8964 - accuracy: 0.3206 - val_loss: 2.7128 - val_accuracy: 0.1676
Epoch 46/300
16/16 [==============================] - 2s 137ms/step - loss: 1.7697 - accuracy: 0.3618 - val_loss: 2.6902 - val_accuracy: 0.2000
Epoch 47/300
16/16 [==============================] - 2s 136ms/step - loss: 1.7482 - accuracy: 0.3804 - val_loss: 2.5571 - val_accuracy: 0.1971
Epoch 48/300
16/16 [==============================] - 2s 136ms/step - loss: 1.6815 - accuracy: 0.4010 - val_loss: 3.0021 - val_accuracy: 0.2206
Epoch 49/300
16/16 [==============================] - 2s 139ms/step - loss: 1.6965 - accuracy: 0.4088 - val_loss: 2.8126 - val_accuracy: 0.1912
Epoch 50/300
16/16 [==============================] - 2s 135ms/step - loss: 1.7152 - accuracy: 0.4098 - val_loss: 2.5703 - val_accuracy: 0.1912
Epoch 51/300
16/16 [==============================] - 2s 136ms/step - loss: 1.5803 - accuracy: 0.4255 - val_loss: 2.6858 - val_accuracy: 0.2500
Epoch 52/300
16/16 [==============================] - 2s 136ms/step - loss: 1.4770 - accuracy: 0.4637 - val_loss: 2.8977 - val_accuracy: 0.2324
Epoch 53/300
16/16 [==============================] - 2s 138ms/step - loss: 1.4642 - accuracy: 0.4951 - val_loss: 2.8955 - val_accuracy: 0.2529
Epoch 54/300
16/16 [==============================] - 2s 136ms/step - loss: 1.3643 - accuracy: 0.5176 - val_loss: 3.1726 - val_accuracy: 0.2382
Epoch 55/300
16/16 [==============================] - 2s 143ms/step - loss: 1.3872 - accuracy: 0.5176 - val_loss: 2.7549 - val_accuracy: 0.2471
Epoch 56/300
16/16 [==============================] - 2s 144ms/step - loss: 1.4884 - accuracy: 0.4775 - val_loss: 2.6576 - val_accuracy: 0.2529
Epoch 57/300
16/16 [==============================] - 2s 142ms/step - loss: 1.4182 - accuracy: 0.5000 - val_loss: 3.0473 - val_accuracy: 0.2441
Epoch 58/300
16/16 [==============================] - 2s 145ms/step - loss: 1.5055 - accuracy: 0.4549 - val_loss: 2.8511 - val_accuracy: 0.2000
Epoch 59/300
16/16 [==============================] - 2s 144ms/step - loss: 1.6230 - accuracy: 0.4069 - val_loss: 3.4098 - val_accuracy: 0.2382
Epoch 60/300
16/16 [==============================] - 2s 147ms/step - loss: 1.3739 - accuracy: 0.5088 - val_loss: 3.1100 - val_accuracy: 0.2559
Epoch 61/300
16/16 [==============================] - 2s 138ms/step - loss: 1.2955 - accuracy: 0.5167 - val_loss: 2.5337 - val_accuracy: 0.2647
Epoch 62/300
16/16 [==============================] - 2s 135ms/step - loss: 1.1926 - accuracy: 0.5853 - val_loss: 3.0002 - val_accuracy: 0.2676
Epoch 63/300
16/16 [==============================] - 2s 138ms/step - loss: 1.1733 - accuracy: 0.5745 - val_loss: 3.0586 - val_accuracy: 0.2265
Epoch 64/300
16/16 [==============================] - 2s 139ms/step - loss: 1.1920 - accuracy: 0.5559 - val_loss: 3.1372 - val_accuracy: 0.2500
Epoch 65/300
16/16 [==============================] - 2s 138ms/step - loss: 1.1613 - accuracy: 0.5843 - val_loss: 3.1693 - val_accuracy: 0.2824
Epoch 66/300
16/16 [==============================] - 2s 138ms/step - loss: 1.1804 - accuracy: 0.5794 - val_loss: 3.1740 - val_accuracy: 0.2647
Epoch 67/300
16/16 [==============================] - 2s 138ms/step - loss: 1.0956 - accuracy: 0.6020 - val_loss: 2.8976 - val_accuracy: 0.2912
Epoch 68/300
16/16 [==============================] - 2s 141ms/step - loss: 1.1307 - accuracy: 0.5971 - val_loss: 3.1713 - val_accuracy: 0.2912
Epoch 69/300
16/16 [==============================] - 2s 129ms/step - loss: 1.0257 - accuracy: 0.6294 - val_loss: 3.4209 - val_accuracy: 0.2647
Epoch 70/300
16/16 [==============================] - 2s 131ms/step - loss: 1.0395 - accuracy: 0.6147 - val_loss: 2.9965 - val_accuracy: 0.2794
Epoch 71/300
16/16 [==============================] - 2s 132ms/step - loss: 1.1487 - accuracy: 0.5824 - val_loss: 3.1404 - val_accuracy: 0.2853
Epoch 72/300
16/16 [==============================] - 2s 130ms/step - loss: 1.0191 - accuracy: 0.6627 - val_loss: 2.8850 - val_accuracy: 0.2765
Epoch 73/300
16/16 [==============================] - 2s 131ms/step - loss: 1.0148 - accuracy: 0.6314 - val_loss: 3.6034 - val_accuracy: 0.2500
Epoch 74/300
16/16 [==============================] - 2s 131ms/step - loss: 0.9743 - accuracy: 0.6490 - val_loss: 3.0752 - val_accuracy: 0.2853
Epoch 75/300
16/16 [==============================] - 2s 131ms/step - loss: 1.0638 - accuracy: 0.6235 - val_loss: 3.7035 - val_accuracy: 0.2529
Epoch 76/300
16/16 [==============================] - 2s 131ms/step - loss: 1.5192 - accuracy: 0.4931 - val_loss: 3.1945 - val_accuracy: 0.2559
Epoch 77/300
16/16 [==============================] - 2s 131ms/step - loss: 1.3351 - accuracy: 0.5569 - val_loss: 2.9621 - val_accuracy: 0.2676
Epoch 78/300
16/16 [==============================] - 2s 131ms/step - loss: 1.2372 - accuracy: 0.5725 - val_loss: 2.8318 - val_accuracy: 0.2588
Epoch 79/300
16/16 [==============================] - 2s 132ms/step - loss: 1.2461 - accuracy: 0.5902 - val_loss: 3.1768 - val_accuracy: 0.2941
Epoch 80/300
16/16 [==============================] - 2s 132ms/step - loss: 1.1720 - accuracy: 0.6137 - val_loss: 3.1983 - val_accuracy: 0.2853
Epoch 81/300
16/16 [==============================] - 2s 130ms/step - loss: 0.9555 - accuracy: 0.6490 - val_loss: 3.0750 - val_accuracy: 0.2882
Epoch 82/300
16/16 [==============================] - 2s 132ms/step - loss: 0.8213 - accuracy: 0.7010 - val_loss: 3.2857 - val_accuracy: 0.3059
Epoch 83/300
16/16 [==============================] - 2s 131ms/step - loss: 0.7283 - accuracy: 0.7294 - val_loss: 3.0747 - val_accuracy: 0.3029
Epoch 84/300
16/16 [==============================] - 2s 132ms/step - loss: 0.6283 - accuracy: 0.7676 - val_loss: 3.0110 - val_accuracy: 0.2941
Epoch 85/300
16/16 [==============================] - 2s 131ms/step - loss: 0.6757 - accuracy: 0.7480 - val_loss: 3.6708 - val_accuracy: 0.3029
Epoch 86/300
16/16 [==============================] - 2s 131ms/step - loss: 0.7453 - accuracy: 0.7363 - val_loss: 4.8454 - val_accuracy: 0.2618
Epoch 87/300
16/16 [==============================] - 2s 132ms/step - loss: 1.0664 - accuracy: 0.6324 - val_loss: 3.4094 - val_accuracy: 0.3088
Epoch 88/300
16/16 [==============================] - 2s 131ms/step - loss: 0.9836 - accuracy: 0.6578 - val_loss: 3.9803 - val_accuracy: 0.2118
Epoch 89/300
16/16 [==============================] - 2s 132ms/step - loss: 1.3145 - accuracy: 0.5412 - val_loss: 3.2551 - val_accuracy: 0.2941
Epoch 90/300
16/16 [==============================] - 2s 131ms/step - loss: 0.7222 - accuracy: 0.7314 - val_loss: 3.4824 - val_accuracy: 0.3235
Epoch 91/300
16/16 [==============================] - 2s 132ms/step - loss: 0.5637 - accuracy: 0.8029 - val_loss: 3.4694 - val_accuracy: 0.3471
Epoch 92/300
16/16 [==============================] - 2s 132ms/step - loss: 0.4838 - accuracy: 0.8265 - val_loss: 3.9601 - val_accuracy: 0.3235
Epoch 93/300
16/16 [==============================] - 2s 131ms/step - loss: 0.5372 - accuracy: 0.8206 - val_loss: 4.4008 - val_accuracy: 0.3176
Epoch 94/300
16/16 [==============================] - 2s 131ms/step - loss: 0.5791 - accuracy: 0.7833 - val_loss: 4.0031 - val_accuracy: 0.3441
Epoch 95/300
16/16 [==============================] - 2s 131ms/step - loss: 0.4570 - accuracy: 0.8284 - val_loss: 3.4744 - val_accuracy: 0.3529
Epoch 96/300
16/16 [==============================] - 2s 132ms/step - loss: 0.5070 - accuracy: 0.8127 - val_loss: 4.3735 - val_accuracy: 0.3147
Epoch 97/300
16/16 [==============================] - 2s 132ms/step - loss: 0.4794 - accuracy: 0.8275 - val_loss: 3.5123 - val_accuracy: 0.3471
Epoch 98/300
16/16 [==============================] - 2s 131ms/step - loss: 0.3648 - accuracy: 0.8853 - val_loss: 3.8945 - val_accuracy: 0.3529
Epoch 99/300
16/16 [==============================] - 2s 131ms/step - loss: 0.3623 - accuracy: 0.8716 - val_loss: 3.9741 - val_accuracy: 0.3294
Epoch 100/300
16/16 [==============================] - 2s 131ms/step - loss: 0.3377 - accuracy: 0.8873 - val_loss: 4.0935 - val_accuracy: 0.3176
Epoch 101/300
16/16 [==============================] - 2s 132ms/step - loss: 0.3591 - accuracy: 0.8931 - val_loss: 3.8680 - val_accuracy: 0.3529
Epoch 102/300
16/16 [==============================] - 2s 131ms/step - loss: 0.3364 - accuracy: 0.8922 - val_loss: 3.8669 - val_accuracy: 0.3294
Epoch 103/300
16/16 [==============================] - 2s 132ms/step - loss: 0.4061 - accuracy: 0.8657 - val_loss: 3.6109 - val_accuracy: 0.3235
Epoch 104/300
16/16 [==============================] - 2s 132ms/step - loss: 0.4010 - accuracy: 0.8667 - val_loss: 4.1196 - val_accuracy: 0.3441
Epoch 105/300
16/16 [==============================] - 2s 132ms/step - loss: 0.3464 - accuracy: 0.8784 - val_loss: 4.2141 - val_accuracy: 0.3529
Epoch 106/300
16/16 [==============================] - 2s 132ms/step - loss: 0.4346 - accuracy: 0.8333 - val_loss: 3.6804 - val_accuracy: 0.3382
Epoch 107/300
16/16 [==============================] - 2s 132ms/step - loss: 0.3235 - accuracy: 0.8882 - val_loss: 4.5891 - val_accuracy: 0.3265
Epoch 108/300
16/16 [==============================] - 2s 132ms/step - loss: 0.4315 - accuracy: 0.8627 - val_loss: 3.9393 - val_accuracy: 0.3471
Epoch 109/300
16/16 [==============================] - 2s 132ms/step - loss: 0.3062 - accuracy: 0.8912 - val_loss: 3.9857 - val_accuracy: 0.3324
Epoch 110/300
16/16 [==============================] - 2s 136ms/step - loss: 0.2370 - accuracy: 0.9235 - val_loss: 4.0232 - val_accuracy: 0.3559
Epoch 111/300
16/16 [==============================] - 2s 146ms/step - loss: 0.2093 - accuracy: 0.9353 - val_loss: 4.0369 - val_accuracy: 0.3471
Epoch 112/300
16/16 [==============================] - 2s 149ms/step - loss: 0.1741 - accuracy: 0.9451 - val_loss: 4.2416 - val_accuracy: 0.3412
Epoch 113/300
16/16 [==============================] - 2s 148ms/step - loss: 0.2925 - accuracy: 0.8941 - val_loss: 4.2352 - val_accuracy: 0.3471
Epoch 114/300
16/16 [==============================] - 2s 147ms/step - loss: 0.3058 - accuracy: 0.8912 - val_loss: 4.2910 - val_accuracy: 0.3647
Epoch 115/300
16/16 [==============================] - 2s 138ms/step - loss: 0.2385 - accuracy: 0.9265 - val_loss: 4.7895 - val_accuracy: 0.3176
Epoch 116/300
16/16 [==============================] - 2s 138ms/step - loss: 0.1742 - accuracy: 0.9451 - val_loss: 4.5086 - val_accuracy: 0.3529
Epoch 117/300
16/16 [==============================] - 2s 136ms/step - loss: 0.1830 - accuracy: 0.9402 - val_loss: 4.3916 - val_accuracy: 0.3382
Epoch 118/300
16/16 [==============================] - 2s 132ms/step - loss: 0.1382 - accuracy: 0.9657 - val_loss: 4.1489 - val_accuracy: 0.3441
Epoch 119/300
16/16 [==============================] - 2s 135ms/step - loss: 0.1176 - accuracy: 0.9676 - val_loss: 4.7097 - val_accuracy: 0.3294
Epoch 120/300
16/16 [==============================] - 4s 221ms/step - loss: 0.0920 - accuracy: 0.9755 - val_loss: 4.3878 - val_accuracy: 0.3471
Epoch 121/300
16/16 [==============================] - 3s 205ms/step - loss: 0.0774 - accuracy: 0.9824 - val_loss: 4.4032 - val_accuracy: 0.3647
Epoch 122/300
16/16 [==============================] - 3s 206ms/step - loss: 0.0831 - accuracy: 0.9765 - val_loss: 4.4612 - val_accuracy: 0.3500
Epoch 123/300
16/16 [==============================] - 3s 206ms/step - loss: 0.0670 - accuracy: 0.9892 - val_loss: 4.4198 - val_accuracy: 0.3529
Epoch 124/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0634 - accuracy: 0.9882 - val_loss: 4.4810 - val_accuracy: 0.3382
Epoch 125/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0540 - accuracy: 0.9902 - val_loss: 4.6029 - val_accuracy: 0.3618
Epoch 126/300
16/16 [==============================] - 3s 206ms/step - loss: 0.0469 - accuracy: 0.9912 - val_loss: 4.5810 - val_accuracy: 0.3529
Epoch 127/300
16/16 [==============================] - 3s 205ms/step - loss: 0.0435 - accuracy: 0.9922 - val_loss: 4.4789 - val_accuracy: 0.3529
Epoch 128/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0488 - accuracy: 0.9902 - val_loss: 4.6879 - val_accuracy: 0.3500
Epoch 129/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0469 - accuracy: 0.9902 - val_loss: 4.4574 - val_accuracy: 0.3706
Epoch 130/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0457 - accuracy: 0.9892 - val_loss: 4.7947 - val_accuracy: 0.3588
Epoch 131/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0431 - accuracy: 0.9951 - val_loss: 4.6743 - val_accuracy: 0.3588
Epoch 132/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0367 - accuracy: 0.9931 - val_loss: 4.7409 - val_accuracy: 0.3500
Epoch 133/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0363 - accuracy: 0.9931 - val_loss: 4.8555 - val_accuracy: 0.3676
Epoch 134/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0337 - accuracy: 0.9951 - val_loss: 4.6904 - val_accuracy: 0.3647
Epoch 135/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0330 - accuracy: 0.9961 - val_loss: 4.7819 - val_accuracy: 0.3647
Epoch 136/300
16/16 [==============================] - 3s 197ms/step - loss: 0.0278 - accuracy: 0.9961 - val_loss: 4.8103 - val_accuracy: 0.3559
Epoch 137/300
16/16 [==============================] - 3s 198ms/step - loss: 0.0246 - accuracy: 0.9961 - val_loss: 4.7038 - val_accuracy: 0.3647
Epoch 138/300
16/16 [==============================] - 3s 198ms/step - loss: 0.0228 - accuracy: 0.9980 - val_loss: 4.7662 - val_accuracy: 0.3588
Epoch 139/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0226 - accuracy: 0.9980 - val_loss: 4.7710 - val_accuracy: 0.3529
Epoch 140/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0226 - accuracy: 0.9980 - val_loss: 4.9085 - val_accuracy: 0.3588
Epoch 141/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0204 - accuracy: 0.9990 - val_loss: 4.8184 - val_accuracy: 0.3706
Epoch 142/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0190 - accuracy: 0.9980 - val_loss: 4.8285 - val_accuracy: 0.3676
Epoch 143/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0173 - accuracy: 0.9980 - val_loss: 4.8606 - val_accuracy: 0.3676
Epoch 144/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0166 - accuracy: 0.9980 - val_loss: 4.8625 - val_accuracy: 0.3676
Epoch 145/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0158 - accuracy: 0.9990 - val_loss: 4.9039 - val_accuracy: 0.3676
Epoch 146/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0151 - accuracy: 0.9990 - val_loss: 4.8582 - val_accuracy: 0.3676
Epoch 147/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0147 - accuracy: 0.9990 - val_loss: 4.9744 - val_accuracy: 0.3647
Epoch 148/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0142 - accuracy: 0.9990 - val_loss: 4.9702 - val_accuracy: 0.3676
Epoch 149/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0140 - accuracy: 0.9990 - val_loss: 5.0103 - val_accuracy: 0.3794
Epoch 150/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0136 - accuracy: 0.9990 - val_loss: 5.0019 - val_accuracy: 0.3647
Epoch 151/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0127 - accuracy: 0.9990 - val_loss: 5.0022 - val_accuracy: 0.3676
Epoch 152/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0126 - accuracy: 0.9990 - val_loss: 4.9994 - val_accuracy: 0.3647
Epoch 153/300
16/16 [==============================] - 3s 198ms/step - loss: 0.0121 - accuracy: 0.9990 - val_loss: 5.0140 - val_accuracy: 0.3706
Epoch 154/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0122 - accuracy: 0.9990 - val_loss: 5.0362 - val_accuracy: 0.3706
Epoch 155/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0114 - accuracy: 0.9990 - val_loss: 5.0541 - val_accuracy: 0.3676
Epoch 156/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0112 - accuracy: 0.9990 - val_loss: 5.1180 - val_accuracy: 0.3735
Epoch 157/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0109 - accuracy: 0.9990 - val_loss: 5.1252 - val_accuracy: 0.3618
Epoch 158/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0103 - accuracy: 0.9990 - val_loss: 5.0361 - val_accuracy: 0.3618
Epoch 159/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0101 - accuracy: 0.9990 - val_loss: 5.1265 - val_accuracy: 0.3618
Epoch 160/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0099 - accuracy: 0.9990 - val_loss: 5.1558 - val_accuracy: 0.3618
Epoch 161/300
16/16 [==============================] - 3s 198ms/step - loss: 0.0094 - accuracy: 0.9990 - val_loss: 5.2107 - val_accuracy: 0.3676
Epoch 162/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0098 - accuracy: 0.9990 - val_loss: 5.2093 - val_accuracy: 0.3765
Epoch 163/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0090 - accuracy: 1.0000 - val_loss: 5.2114 - val_accuracy: 0.3676
Epoch 164/300
16/16 [==============================] - 3s 198ms/step - loss: 0.0088 - accuracy: 1.0000 - val_loss: 5.1634 - val_accuracy: 0.3647
Epoch 165/300
16/16 [==============================] - 3s 198ms/step - loss: 0.0086 - accuracy: 1.0000 - val_loss: 5.3253 - val_accuracy: 0.3588
Epoch 166/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0083 - accuracy: 1.0000 - val_loss: 5.3348 - val_accuracy: 0.3706
Epoch 167/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0080 - accuracy: 0.9990 - val_loss: 5.3204 - val_accuracy: 0.3735
Epoch 168/300
16/16 [==============================] - 3s 205ms/step - loss: 0.0080 - accuracy: 0.9990 - val_loss: 5.4273 - val_accuracy: 0.3706
Epoch 169/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0089 - accuracy: 0.9990 - val_loss: 5.5091 - val_accuracy: 0.3588
Epoch 170/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0075 - accuracy: 1.0000 - val_loss: 5.3584 - val_accuracy: 0.3647
Epoch 171/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0066 - accuracy: 1.0000 - val_loss: 5.4365 - val_accuracy: 0.3618
Epoch 172/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0067 - accuracy: 1.0000 - val_loss: 5.4686 - val_accuracy: 0.3735
Epoch 173/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0064 - accuracy: 1.0000 - val_loss: 5.5026 - val_accuracy: 0.3618
Epoch 174/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0062 - accuracy: 1.0000 - val_loss: 5.4729 - val_accuracy: 0.3618
Epoch 175/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0065 - accuracy: 1.0000 - val_loss: 5.5618 - val_accuracy: 0.3559
Epoch 176/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0061 - accuracy: 1.0000 - val_loss: 5.5513 - val_accuracy: 0.3559
Epoch 177/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0058 - accuracy: 1.0000 - val_loss: 5.4538 - val_accuracy: 0.3647
Epoch 178/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0055 - accuracy: 1.0000 - val_loss: 5.5639 - val_accuracy: 0.3647
Epoch 179/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 5.5566 - val_accuracy: 0.3676
Epoch 180/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0052 - accuracy: 1.0000 - val_loss: 5.5866 - val_accuracy: 0.3676
Epoch 181/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0050 - accuracy: 1.0000 - val_loss: 5.6076 - val_accuracy: 0.3588
Epoch 182/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0048 - accuracy: 1.0000 - val_loss: 5.6233 - val_accuracy: 0.3618
Epoch 183/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0047 - accuracy: 1.0000 - val_loss: 5.7047 - val_accuracy: 0.3676
Epoch 184/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0047 - accuracy: 1.0000 - val_loss: 5.6491 - val_accuracy: 0.3618
Epoch 185/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0046 - accuracy: 1.0000 - val_loss: 5.7154 - val_accuracy: 0.3588
Epoch 186/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0044 - accuracy: 1.0000 - val_loss: 5.7037 - val_accuracy: 0.3647
Epoch 187/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0043 - accuracy: 1.0000 - val_loss: 5.7276 - val_accuracy: 0.3559
Epoch 188/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0042 - accuracy: 1.0000 - val_loss: 5.7053 - val_accuracy: 0.3676
Epoch 189/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0041 - accuracy: 1.0000 - val_loss: 5.7540 - val_accuracy: 0.3588
Epoch 190/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0039 - accuracy: 1.0000 - val_loss: 5.7438 - val_accuracy: 0.3618
Epoch 191/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0039 - accuracy: 1.0000 - val_loss: 5.7816 - val_accuracy: 0.3647
Epoch 192/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0040 - accuracy: 1.0000 - val_loss: 5.8787 - val_accuracy: 0.3529
Epoch 193/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0042 - accuracy: 1.0000 - val_loss: 5.7073 - val_accuracy: 0.3706
Epoch 194/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0040 - accuracy: 1.0000 - val_loss: 5.8270 - val_accuracy: 0.3676
Epoch 195/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0037 - accuracy: 1.0000 - val_loss: 5.8102 - val_accuracy: 0.3588
Epoch 196/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0036 - accuracy: 1.0000 - val_loss: 5.8473 - val_accuracy: 0.3676
Epoch 197/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0035 - accuracy: 1.0000 - val_loss: 5.8429 - val_accuracy: 0.3588
Epoch 198/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0034 - accuracy: 1.0000 - val_loss: 5.8925 - val_accuracy: 0.3588
Epoch 199/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0033 - accuracy: 1.0000 - val_loss: 5.8649 - val_accuracy: 0.3588
Epoch 200/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0036 - accuracy: 1.0000 - val_loss: 5.8467 - val_accuracy: 0.3647
Epoch 201/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0034 - accuracy: 1.0000 - val_loss: 5.9540 - val_accuracy: 0.3559
Epoch 202/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0032 - accuracy: 1.0000 - val_loss: 5.9250 - val_accuracy: 0.3500
Epoch 203/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0031 - accuracy: 1.0000 - val_loss: 6.0010 - val_accuracy: 0.3588
Epoch 204/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0030 - accuracy: 1.0000 - val_loss: 5.9693 - val_accuracy: 0.3618
Epoch 205/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0028 - accuracy: 1.0000 - val_loss: 5.9947 - val_accuracy: 0.3529
Epoch 206/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0026 - accuracy: 1.0000 - val_loss: 5.9602 - val_accuracy: 0.3588
Epoch 207/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0025 - accuracy: 1.0000 - val_loss: 5.9786 - val_accuracy: 0.3618
Epoch 208/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0024 - accuracy: 1.0000 - val_loss: 5.9981 - val_accuracy: 0.3706
Epoch 209/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0024 - accuracy: 1.0000 - val_loss: 6.0203 - val_accuracy: 0.3647
Epoch 210/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0023 - accuracy: 1.0000 - val_loss: 6.0552 - val_accuracy: 0.3500
Epoch 211/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0023 - accuracy: 1.0000 - val_loss: 6.0396 - val_accuracy: 0.3529
Epoch 212/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0022 - accuracy: 1.0000 - val_loss: 6.0731 - val_accuracy: 0.3588
Epoch 213/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0022 - accuracy: 1.0000 - val_loss: 6.0745 - val_accuracy: 0.3618
Epoch 214/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0021 - accuracy: 1.0000 - val_loss: 6.0868 - val_accuracy: 0.3529
Epoch 215/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0021 - accuracy: 1.0000 - val_loss: 6.1148 - val_accuracy: 0.3588
Epoch 216/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0021 - accuracy: 1.0000 - val_loss: 6.1265 - val_accuracy: 0.3647
Epoch 217/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0020 - accuracy: 1.0000 - val_loss: 6.1336 - val_accuracy: 0.3559
Epoch 218/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0019 - accuracy: 1.0000 - val_loss: 6.1471 - val_accuracy: 0.3618
Epoch 219/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0019 - accuracy: 1.0000 - val_loss: 6.1560 - val_accuracy: 0.3588
Epoch 220/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0019 - accuracy: 1.0000 - val_loss: 6.1567 - val_accuracy: 0.3529
Epoch 221/300
16/16 [==============================] - 3s 205ms/step - loss: 0.0018 - accuracy: 1.0000 - val_loss: 6.1798 - val_accuracy: 0.3618
Epoch 222/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0018 - accuracy: 1.0000 - val_loss: 6.2008 - val_accuracy: 0.3588
Epoch 223/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0018 - accuracy: 1.0000 - val_loss: 6.2290 - val_accuracy: 0.3559
Epoch 224/300
16/16 [==============================] - 3s 217ms/step - loss: 0.0018 - accuracy: 1.0000 - val_loss: 6.1914 - val_accuracy: 0.3647
Epoch 225/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0017 - accuracy: 1.0000 - val_loss: 6.2070 - val_accuracy: 0.3676
Epoch 226/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0017 - accuracy: 1.0000 - val_loss: 6.2446 - val_accuracy: 0.3559
Epoch 227/300
16/16 [==============================] - 3s 204ms/step - loss: 0.0017 - accuracy: 1.0000 - val_loss: 6.2598 - val_accuracy: 0.3647
Epoch 228/300
16/16 [==============================] - 3s 205ms/step - loss: 0.0016 - accuracy: 1.0000 - val_loss: 6.2418 - val_accuracy: 0.3618
Epoch 229/300
16/16 [==============================] - 3s 203ms/step - loss: 0.0016 - accuracy: 1.0000 - val_loss: 6.2587 - val_accuracy: 0.3647
Epoch 230/300
16/16 [==============================] - 3s 188ms/step - loss: 0.0016 - accuracy: 1.0000 - val_loss: 6.2825 - val_accuracy: 0.3618
Epoch 231/300
16/16 [==============================] - 3s 193ms/step - loss: 0.0016 - accuracy: 1.0000 - val_loss: 6.3120 - val_accuracy: 0.3647
Epoch 232/300
16/16 [==============================] - 3s 192ms/step - loss: 0.0015 - accuracy: 1.0000 - val_loss: 6.3184 - val_accuracy: 0.3618
Epoch 233/300
16/16 [==============================] - 3s 191ms/step - loss: 0.0015 - accuracy: 1.0000 - val_loss: 6.2914 - val_accuracy: 0.3676
Epoch 234/300
16/16 [==============================] - 3s 188ms/step - loss: 0.0015 - accuracy: 1.0000 - val_loss: 6.3014 - val_accuracy: 0.3706
Epoch 235/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0015 - accuracy: 1.0000 - val_loss: 6.3135 - val_accuracy: 0.3588
Epoch 236/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0014 - accuracy: 1.0000 - val_loss: 6.3441 - val_accuracy: 0.3676
Epoch 237/300
16/16 [==============================] - 3s 202ms/step - loss: 0.0014 - accuracy: 1.0000 - val_loss: 6.3416 - val_accuracy: 0.3676
Epoch 238/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0014 - accuracy: 1.0000 - val_loss: 6.3789 - val_accuracy: 0.3676
Epoch 239/300
16/16 [==============================] - 3s 198ms/step - loss: 0.0014 - accuracy: 1.0000 - val_loss: 6.3784 - val_accuracy: 0.3647
Epoch 240/300
16/16 [==============================] - 3s 198ms/step - loss: 0.0013 - accuracy: 1.0000 - val_loss: 6.3826 - val_accuracy: 0.3676
Epoch 241/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0013 - accuracy: 1.0000 - val_loss: 6.4209 - val_accuracy: 0.3559
Epoch 242/300
16/16 [==============================] - 3s 197ms/step - loss: 0.0013 - accuracy: 1.0000 - val_loss: 6.4536 - val_accuracy: 0.3588
Epoch 243/300
16/16 [==============================] - 3s 197ms/step - loss: 0.0013 - accuracy: 1.0000 - val_loss: 6.4082 - val_accuracy: 0.3706
Epoch 244/300
16/16 [==============================] - 3s 197ms/step - loss: 0.0012 - accuracy: 1.0000 - val_loss: 6.4605 - val_accuracy: 0.3647
Epoch 245/300
16/16 [==============================] - 3s 195ms/step - loss: 0.0013 - accuracy: 1.0000 - val_loss: 6.4709 - val_accuracy: 0.3500
Epoch 246/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0012 - accuracy: 1.0000 - val_loss: 6.4637 - val_accuracy: 0.3647
Epoch 247/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0012 - accuracy: 1.0000 - val_loss: 6.4483 - val_accuracy: 0.3706
Epoch 248/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 6.4912 - val_accuracy: 0.3706
Epoch 249/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0012 - accuracy: 1.0000 - val_loss: 6.4871 - val_accuracy: 0.3676
Epoch 250/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 6.5237 - val_accuracy: 0.3618
Epoch 251/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 6.5487 - val_accuracy: 0.3618
Epoch 252/300
16/16 [==============================] - 3s 199ms/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 6.5384 - val_accuracy: 0.3618
Epoch 253/300
16/16 [==============================] - 3s 200ms/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 6.5366 - val_accuracy: 0.3618
Epoch 254/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0010 - accuracy: 1.0000 - val_loss: 6.5678 - val_accuracy: 0.3676
Epoch 255/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0010 - accuracy: 1.0000 - val_loss: 6.5705 - val_accuracy: 0.3676
Epoch 256/300
16/16 [==============================] - 3s 201ms/step - loss: 0.0010 - accuracy: 1.0000 - val_loss: 6.5754 - val_accuracy: 0.3676
Epoch 257/300
16/16 [==============================] - 3s 200ms/step - loss: 9.8164e-04 - accuracy: 1.0000 - val_loss: 6.5994 - val_accuracy: 0.3647
Epoch 258/300
16/16 [==============================] - 3s 204ms/step - loss: 9.6949e-04 - accuracy: 1.0000 - val_loss: 6.6420 - val_accuracy: 0.3706
Epoch 259/300
16/16 [==============================] - 3s 200ms/step - loss: 9.6889e-04 - accuracy: 1.0000 - val_loss: 6.6273 - val_accuracy: 0.3618
Epoch 260/300
16/16 [==============================] - 3s 201ms/step - loss: 9.4119e-04 - accuracy: 1.0000 - val_loss: 6.6474 - val_accuracy: 0.3765
Epoch 261/300
16/16 [==============================] - 3s 200ms/step - loss: 9.1795e-04 - accuracy: 1.0000 - val_loss: 6.6512 - val_accuracy: 0.3706
Epoch 262/300
16/16 [==============================] - 3s 199ms/step - loss: 9.1812e-04 - accuracy: 1.0000 - val_loss: 6.6505 - val_accuracy: 0.3706
Epoch 263/300
16/16 [==============================] - 3s 200ms/step - loss: 8.8485e-04 - accuracy: 1.0000 - val_loss: 6.6673 - val_accuracy: 0.3676
Epoch 264/300
16/16 [==============================] - 3s 200ms/step - loss: 8.8162e-04 - accuracy: 1.0000 - val_loss: 6.7018 - val_accuracy: 0.3676
Epoch 265/300
16/16 [==============================] - 3s 200ms/step - loss: 8.5433e-04 - accuracy: 1.0000 - val_loss: 6.6824 - val_accuracy: 0.3735
Epoch 266/300
16/16 [==============================] - 3s 198ms/step - loss: 8.4205e-04 - accuracy: 1.0000 - val_loss: 6.7091 - val_accuracy: 0.3735
Epoch 267/300
16/16 [==============================] - 3s 201ms/step - loss: 8.3501e-04 - accuracy: 1.0000 - val_loss: 6.6921 - val_accuracy: 0.3706
Epoch 268/300
16/16 [==============================] - 3s 201ms/step - loss: 8.1630e-04 - accuracy: 1.0000 - val_loss: 6.7509 - val_accuracy: 0.3706
Epoch 269/300
16/16 [==============================] - 3s 199ms/step - loss: 7.9647e-04 - accuracy: 1.0000 - val_loss: 6.7359 - val_accuracy: 0.3676
Epoch 270/300
16/16 [==============================] - 3s 200ms/step - loss: 7.7917e-04 - accuracy: 1.0000 - val_loss: 6.7679 - val_accuracy: 0.3706
Epoch 271/300
16/16 [==============================] - 3s 199ms/step - loss: 7.6657e-04 - accuracy: 1.0000 - val_loss: 6.7652 - val_accuracy: 0.3676
Epoch 272/300
16/16 [==============================] - 3s 201ms/step - loss: 7.5118e-04 - accuracy: 1.0000 - val_loss: 6.7742 - val_accuracy: 0.3735
Epoch 273/300
16/16 [==============================] - 3s 198ms/step - loss: 7.3695e-04 - accuracy: 1.0000 - val_loss: 6.7841 - val_accuracy: 0.3706
Epoch 274/300
16/16 [==============================] - 3s 199ms/step - loss: 7.2571e-04 - accuracy: 1.0000 - val_loss: 6.7908 - val_accuracy: 0.3794
Epoch 275/300
16/16 [==============================] - 3s 199ms/step - loss: 7.1165e-04 - accuracy: 1.0000 - val_loss: 6.8078 - val_accuracy: 0.3706
Epoch 276/300
16/16 [==============================] - 3s 199ms/step - loss: 7.0199e-04 - accuracy: 1.0000 - val_loss: 6.8148 - val_accuracy: 0.3735
Epoch 277/300
16/16 [==============================] - 3s 200ms/step - loss: 6.9624e-04 - accuracy: 1.0000 - val_loss: 6.8561 - val_accuracy: 0.3706
Epoch 278/300
16/16 [==============================] - 3s 199ms/step - loss: 6.8146e-04 - accuracy: 1.0000 - val_loss: 6.8691 - val_accuracy: 0.3765
Epoch 279/300
16/16 [==============================] - 3s 198ms/step - loss: 6.6960e-04 - accuracy: 1.0000 - val_loss: 6.8602 - val_accuracy: 0.3765
Epoch 280/300
16/16 [==============================] - 3s 198ms/step - loss: 6.5414e-04 - accuracy: 1.0000 - val_loss: 6.8811 - val_accuracy: 0.3735
Epoch 281/300
16/16 [==============================] - 3s 199ms/step - loss: 6.4479e-04 - accuracy: 1.0000 - val_loss: 6.8847 - val_accuracy: 0.3706
Epoch 282/300
16/16 [==============================] - 3s 200ms/step - loss: 6.3757e-04 - accuracy: 1.0000 - val_loss: 6.9023 - val_accuracy: 0.3735
Epoch 283/300
16/16 [==============================] - 3s 200ms/step - loss: 6.2702e-04 - accuracy: 1.0000 - val_loss: 6.8823 - val_accuracy: 0.3765
Epoch 284/300
16/16 [==============================] - 3s 201ms/step - loss: 6.2100e-04 - accuracy: 1.0000 - val_loss: 6.9147 - val_accuracy: 0.3735
Epoch 285/300
16/16 [==============================] - 3s 199ms/step - loss: 6.0546e-04 - accuracy: 1.0000 - val_loss: 6.9506 - val_accuracy: 0.3706
Epoch 286/300
16/16 [==============================] - 3s 203ms/step - loss: 5.8950e-04 - accuracy: 1.0000 - val_loss: 6.9532 - val_accuracy: 0.3647
Epoch 287/300
16/16 [==============================] - 3s 201ms/step - loss: 5.9201e-04 - accuracy: 1.0000 - val_loss: 6.9619 - val_accuracy: 0.3765
Epoch 288/300
16/16 [==============================] - 3s 201ms/step - loss: 5.6901e-04 - accuracy: 1.0000 - val_loss: 6.9596 - val_accuracy: 0.3794
Epoch 289/300
16/16 [==============================] - 3s 204ms/step - loss: 5.6160e-04 - accuracy: 1.0000 - val_loss: 6.9937 - val_accuracy: 0.3765
Epoch 290/300
16/16 [==============================] - 3s 202ms/step - loss: 5.4976e-04 - accuracy: 1.0000 - val_loss: 6.9818 - val_accuracy: 0.3735
Epoch 291/300
16/16 [==============================] - 3s 202ms/step - loss: 5.4465e-04 - accuracy: 1.0000 - val_loss: 7.0242 - val_accuracy: 0.3735
Epoch 292/300
16/16 [==============================] - 3s 201ms/step - loss: 5.2819e-04 - accuracy: 1.0000 - val_loss: 6.9923 - val_accuracy: 0.3794
Epoch 293/300
16/16 [==============================] - 3s 202ms/step - loss: 5.1764e-04 - accuracy: 1.0000 - val_loss: 7.0487 - val_accuracy: 0.3765
Epoch 294/300
16/16 [==============================] - 3s 200ms/step - loss: 5.1238e-04 - accuracy: 1.0000 - val_loss: 7.0379 - val_accuracy: 0.3824
Epoch 295/300
16/16 [==============================] - 3s 203ms/step - loss: 4.9777e-04 - accuracy: 1.0000 - val_loss: 7.0372 - val_accuracy: 0.3794
Epoch 296/300
16/16 [==============================] - 3s 204ms/step - loss: 5.0061e-04 - accuracy: 1.0000 - val_loss: 7.0821 - val_accuracy: 0.3794
Epoch 297/300
16/16 [==============================] - 3s 203ms/step - loss: 4.8554e-04 - accuracy: 1.0000 - val_loss: 7.0916 - val_accuracy: 0.3706
Epoch 298/300
16/16 [==============================] - 3s 204ms/step - loss: 4.7837e-04 - accuracy: 1.0000 - val_loss: 7.0773 - val_accuracy: 0.3765
Epoch 299/300
16/16 [==============================] - 3s 214ms/step - loss: 4.6823e-04 - accuracy: 1.0000 - val_loss: 7.0882 - val_accuracy: 0.3794
Epoch 300/300
16/16 [==============================] - 3s 201ms/step - loss: 4.6146e-04 - accuracy: 1.0000 - val_loss: 7.1199 - val_accuracy: 0.3794
############### Total Time Taken:  15 Minutes #############
In [20]:
print('Validation accuracy using ANN for flower classifier is : ', max(history.history['val_accuracy'])*100,'%')
Validation accuracy using ANN for flower classifier is :  38.235294818878174 %
In [23]:
import matplotlib.pyplot as plt
# plot the loss
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.legend()
plt.show()

# plot the accuracy
plt.plot(history.history['accuracy'], label='train acc')
plt.plot(history.history['val_accuracy'], label='val acc')
plt.legend()
plt.show()
In [25]:
# save it as a pickle file
from tensorflow.keras.models import load_model
flowers_model.save('flower_model_ann.pkl')
WARNING:tensorflow:From C:\Users\admin\AppData\Roaming\Python\Python38\site-packages\tensorflow\python\training\tracking\tracking.py:111: Model.state_updates (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.
Instructions for updating:
This property should not be used in TensorFlow 2.0, as updates are applied automatically.
WARNING:tensorflow:From C:\Users\admin\AppData\Roaming\Python\Python38\site-packages\tensorflow\python\training\tracking\tracking.py:111: Layer.updates (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.
Instructions for updating:
This property should not be used in TensorFlow 2.0, as updates are applied automatically.
INFO:tensorflow:Assets written to: flower_model_ann.pkl\assets

Flower Classifier - Using Convolutional Neural Networks

In [26]:
# Deep Learning CNN model to recognize the flower

'''########################## IMAGE PRE-PROCESSING for TRAINING and TESTING data ##############################'''

TrainingImagePath='C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\flowers'

from keras.preprocessing.image import ImageDataGenerator

# Defining pre-processing transformations on raw images of training data
train_datagen = ImageDataGenerator(
        rescale=1./255,
        shear_range=0.1,
        zoom_range=0.1,
        horizontal_flip=True,
        validation_split=0.25)

# Generating the Training Data
training_set = train_datagen.flow_from_directory(
        TrainingImagePath,
        target_size=(224, 224),
        batch_size=32,
        class_mode='categorical',
        subset='training')


# Generating the Validation Data
validation_set = train_datagen.flow_from_directory(
        TrainingImagePath,
        target_size=(224, 224),
        batch_size=32,
        class_mode='categorical',
        subset='validation')

# Printing class labels for each face
# test_set.class_indices
Found 1020 images belonging to 17 classes.
Found 340 images belonging to 17 classes.
In [27]:
'''#################### Creating lookup table for all flowers ##############################'''

# class_indices have the numeric tag for each flower
TrainClasses=training_set.class_indices

# Storing the flower and the numeric tag for future reference
ResultMap={}
for flowerValue,flowerName in zip(TrainClasses.values(),TrainClasses.keys()):
    ResultMap[flowerValue]=flowerName

# Saving the flower map for future reference
import pickle
with open("C:\\Users\\admin\\Desktop\\Great Learning\\Computer Vision\\Project\\ResultsMap_flowers.pkl", 'wb') as f:
    pickle.dump(ResultMap, f, pickle.HIGHEST_PROTOCOL)

print("Mapping of Flower Type and its ID",ResultMap)

# The number of neurons for the output layer is equal to the number of flowers
OutputNeurons=len(ResultMap)
print('\n The Number of output neurons: ', OutputNeurons)
Mapping of Flower Type and its ID {0: '0', 1: '1', 2: '10', 3: '11', 4: '12', 5: '13', 6: '14', 7: '15', 8: '16', 9: '2', 10: '3', 11: '4', 12: '5', 13: '6', 14: '7', 15: '8', 16: '9'}

 The Number of output neurons:  17
In [28]:
'''######################## Creating CNN deep learning model ####################################'''

from keras.models import Sequential
from keras.layers import Convolution2D
from keras.layers import MaxPool2D
from keras.layers import Flatten
from keras.layers import Dense

'''Initializing the Convolutional Neural Network'''
flower_classifier= Sequential()

''' STEP--1 Convolution
# Adding the first layer of CNN
# we are using the format (224,224,3) because we are using TensorFlow backend
# It means 3 matrix of size (224X224) pixels representing Red, Green and Blue components of pixels
'''
flower_classifier.add(Convolution2D(32, kernel_size=(5, 5), strides=(1, 1), input_shape=(224,224,3), activation='relu'))

'''# STEP--2 MAX Pooling'''
flower_classifier.add(MaxPool2D(pool_size=(2,2)))

'''############## ADDITIONAL LAYER of CONVOLUTION for better accuracy #################'''
flower_classifier.add(Convolution2D(64, kernel_size=(5, 5), strides=(1, 1), activation='relu'))

flower_classifier.add(MaxPool2D(pool_size=(2,2)))


'''# STEP--3 FLattening'''
flower_classifier.add(Flatten())

'''# STEP--4 Fully Connected Neural Network'''
flower_classifier.add(Dense(512, activation='relu'))

flower_classifier.add(Dense(OutputNeurons, activation='softmax'))

'''# Compiling the CNN'''
#flower_classifier.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
flower_classifier.compile(loss='categorical_crossentropy', optimizer = 'adam', metrics=["accuracy"])
In [29]:
import time
# Measuring the time taken by the model to train
StartTime=time.time()

# Starting the model training
history = flower_classifier.fit_generator(
                    training_set,
                    steps_per_epoch=30,validation_data=validation_set,
                    epochs=25)

EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
WARNING:tensorflow:From <ipython-input-29-b0d2955320fc>:6: Model.fit_generator (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.
Instructions for updating:
Please use Model.fit, which supports generators.
Epoch 1/25
30/30 [==============================] - 169s 6s/step - loss: 4.5592 - accuracy: 0.1538 - val_loss: 1.9159 - val_accuracy: 0.3500
Epoch 2/25
30/30 [==============================] - 157s 5s/step - loss: 1.9003 - accuracy: 0.3672 - val_loss: 1.5436 - val_accuracy: 0.4294
Epoch 3/25
30/30 [==============================] - 156s 5s/step - loss: 1.5928 - accuracy: 0.4749 - val_loss: 1.6651 - val_accuracy: 0.4294
Epoch 4/25
30/30 [==============================] - 164s 5s/step - loss: 1.3457 - accuracy: 0.5439 - val_loss: 1.3156 - val_accuracy: 0.5265
Epoch 5/25
30/30 [==============================] - 157s 5s/step - loss: 1.1063 - accuracy: 0.6339 - val_loss: 1.5771 - val_accuracy: 0.5294
Epoch 6/25
30/30 [==============================] - 157s 5s/step - loss: 1.0563 - accuracy: 0.6548 - val_loss: 1.5540 - val_accuracy: 0.5353
Epoch 7/25
30/30 [==============================] - 157s 5s/step - loss: 0.8675 - accuracy: 0.7155 - val_loss: 1.2308 - val_accuracy: 0.5882
Epoch 8/25
30/30 [==============================] - 156s 5s/step - loss: 0.7966 - accuracy: 0.7448 - val_loss: 1.4902 - val_accuracy: 0.5500
Epoch 9/25
30/30 [==============================] - 156s 5s/step - loss: 0.6059 - accuracy: 0.7971 - val_loss: 1.7675 - val_accuracy: 0.5676
Epoch 10/25
30/30 [==============================] - 164s 5s/step - loss: 0.5788 - accuracy: 0.8138 - val_loss: 1.7239 - val_accuracy: 0.5059
Epoch 11/25
30/30 [==============================] - 157s 5s/step - loss: 0.6598 - accuracy: 0.7896 - val_loss: 1.3617 - val_accuracy: 0.6118
Epoch 12/25
30/30 [==============================] - 156s 5s/step - loss: 0.4409 - accuracy: 0.8577 - val_loss: 1.9613 - val_accuracy: 0.5382
Epoch 13/25
30/30 [==============================] - 164s 5s/step - loss: 0.4431 - accuracy: 0.8504 - val_loss: 1.5987 - val_accuracy: 0.5588
Epoch 14/25
30/30 [==============================] - 157s 5s/step - loss: 0.3725 - accuracy: 0.8975 - val_loss: 1.6840 - val_accuracy: 0.5559
Epoch 15/25
30/30 [==============================] - 156s 5s/step - loss: 0.2276 - accuracy: 0.9320 - val_loss: 1.9872 - val_accuracy: 0.5794
Epoch 16/25
30/30 [==============================] - 165s 5s/step - loss: 0.2239 - accuracy: 0.9104 - val_loss: 1.9681 - val_accuracy: 0.5765
Epoch 17/25
30/30 [==============================] - 157s 5s/step - loss: 0.2268 - accuracy: 0.9226 - val_loss: 1.9645 - val_accuracy: 0.5676
Epoch 18/25
30/30 [==============================] - 157s 5s/step - loss: 0.2872 - accuracy: 0.9094 - val_loss: 1.9474 - val_accuracy: 0.5971
Epoch 19/25
30/30 [==============================] - 156s 5s/step - loss: 0.1786 - accuracy: 0.9393 - val_loss: 1.8632 - val_accuracy: 0.6029
Epoch 20/25
30/30 [==============================] - 157s 5s/step - loss: 0.1630 - accuracy: 0.9477 - val_loss: 2.1257 - val_accuracy: 0.5941
Epoch 21/25
30/30 [==============================] - 163s 5s/step - loss: 0.1564 - accuracy: 0.9540 - val_loss: 1.8168 - val_accuracy: 0.6235
Epoch 22/25
30/30 [==============================] - 157s 5s/step - loss: 0.0967 - accuracy: 0.9697 - val_loss: 2.0363 - val_accuracy: 0.6235
Epoch 23/25
30/30 [==============================] - 157s 5s/step - loss: 0.0876 - accuracy: 0.9749 - val_loss: 2.0910 - val_accuracy: 0.6059
Epoch 24/25
30/30 [==============================] - 161s 5s/step - loss: 0.1044 - accuracy: 0.9686 - val_loss: 2.1775 - val_accuracy: 0.6059
Epoch 25/25
30/30 [==============================] - 156s 5s/step - loss: 0.2018 - accuracy: 0.9383 - val_loss: 2.2769 - val_accuracy: 0.5794
############### Total Time Taken:  69 Minutes #############
In [30]:
results = flower_classifier.evaluate(validation_set)
print('Validation accuracy for flower classifier using CNN is : ', results[1]*100,'%')
11/11 [==============================] - 17s 2s/step - loss: 2.4046 - accuracy: 0.5941
Validation accuracy for flower classifier using CNN is :  59.41176414489746 %
In [31]:
import matplotlib.pyplot as plt
# plot the loss
plt.plot(history.history['loss'], label='train loss')
plt.plot(history.history['val_loss'], label='val loss')
plt.legend()
plt.show()

# plot the accuracy
plt.plot(history.history['accuracy'], label='train acc')
plt.plot(history.history['val_accuracy'], label='val acc')
plt.legend()
plt.show()
In [32]:
# save it as a pickle file
from tensorflow.keras.models import load_model
flower_classifier.save('flower_classifier_cnn.pkl')
INFO:tensorflow:Assets written to: flower_classifier_cnn.pkl\assets
In [33]:
'''########################## Making single predictions ############################'''
import numpy as np
from keras.preprocessing import image

testImage='C:/Users/admin/Desktop/Great Learning/Computer Vision/Project/predict-flower.jpg'
test_image=image.load_img(testImage,target_size=(224, 224))
test_image=image.img_to_array(test_image)

test_image=np.expand_dims(test_image,axis=0)

result=flower_classifier.predict(test_image,verbose=0)
#print(training_set.class_indices)

print('####'*10)
print('Prediction is: ',ResultMap[np.argmax(result)])
########################################
Prediction is:  15

Flower Classifier - Using Transfer Learning Techniques

In [34]:
from tensorflow.keras.layers import Input, Lambda, Dense, Flatten
from tensorflow.keras.models import Model
from tensorflow.keras.applications.inception_v3 import InceptionV3
from tensorflow.keras.applications.inception_v3 import preprocess_input
from tensorflow.keras.applications.vgg16 import VGG16
from tensorflow.keras.applications.vgg16 import preprocess_input
from tensorflow.keras.applications.resnet50 import ResNet50
from tensorflow.keras.applications.resnet50 import preprocess_input
from tensorflow.keras.applications import ResNet152V2
from tensorflow.keras.preprocessing import image
from tensorflow.keras.preprocessing.image import ImageDataGenerator,load_img
from tensorflow.keras.models import Sequential
import numpy as np
from glob import glob
import warnings
warnings.filterwarnings("ignore")

Transfer Learning Using Inception V3

In [35]:
# re-size all the images to this
IMAGE_SIZE = [224, 224]

train_path = 'C:/Users/admin/Desktop/Great Learning/Computer Vision/Project/flowers'
In [36]:
inception = InceptionV3(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)

# don't train existing weights
for layer in inception.layers:
    layer.trainable = False
# useful for getting number of output classes
folders = glob('C:/Users/admin/Desktop/Great Learning/Computer Vision/Project/flowers/*')
x = Flatten()(inception.output)
In [37]:
prediction = Dense(len(folders), activation='softmax')(x)

# create a model object
inception_model = Model(inputs=inception.input, outputs=prediction)

# view the structure of the model
inception_model.summary()
Model: "functional_1"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            [(None, 224, 224, 3) 0                                            
__________________________________________________________________________________________________
conv2d_2 (Conv2D)               (None, 111, 111, 32) 864         input_1[0][0]                    
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 111, 111, 32) 96          conv2d_2[0][0]                   
__________________________________________________________________________________________________
activation_15 (Activation)      (None, 111, 111, 32) 0           batch_normalization[0][0]        
__________________________________________________________________________________________________
conv2d_3 (Conv2D)               (None, 109, 109, 32) 9216        activation_15[0][0]              
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 109, 109, 32) 96          conv2d_3[0][0]                   
__________________________________________________________________________________________________
activation_16 (Activation)      (None, 109, 109, 32) 0           batch_normalization_1[0][0]      
__________________________________________________________________________________________________
conv2d_4 (Conv2D)               (None, 109, 109, 64) 18432       activation_16[0][0]              
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 109, 109, 64) 192         conv2d_4[0][0]                   
__________________________________________________________________________________________________
activation_17 (Activation)      (None, 109, 109, 64) 0           batch_normalization_2[0][0]      
__________________________________________________________________________________________________
max_pooling2d_2 (MaxPooling2D)  (None, 54, 54, 64)   0           activation_17[0][0]              
__________________________________________________________________________________________________
conv2d_5 (Conv2D)               (None, 54, 54, 80)   5120        max_pooling2d_2[0][0]            
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 54, 54, 80)   240         conv2d_5[0][0]                   
__________________________________________________________________________________________________
activation_18 (Activation)      (None, 54, 54, 80)   0           batch_normalization_3[0][0]      
__________________________________________________________________________________________________
conv2d_6 (Conv2D)               (None, 52, 52, 192)  138240      activation_18[0][0]              
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 52, 52, 192)  576         conv2d_6[0][0]                   
__________________________________________________________________________________________________
activation_19 (Activation)      (None, 52, 52, 192)  0           batch_normalization_4[0][0]      
__________________________________________________________________________________________________
max_pooling2d_3 (MaxPooling2D)  (None, 25, 25, 192)  0           activation_19[0][0]              
__________________________________________________________________________________________________
conv2d_10 (Conv2D)              (None, 25, 25, 64)   12288       max_pooling2d_3[0][0]            
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 25, 25, 64)   192         conv2d_10[0][0]                  
__________________________________________________________________________________________________
activation_23 (Activation)      (None, 25, 25, 64)   0           batch_normalization_8[0][0]      
__________________________________________________________________________________________________
conv2d_8 (Conv2D)               (None, 25, 25, 48)   9216        max_pooling2d_3[0][0]            
__________________________________________________________________________________________________
conv2d_11 (Conv2D)              (None, 25, 25, 96)   55296       activation_23[0][0]              
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 25, 25, 48)   144         conv2d_8[0][0]                   
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 25, 25, 96)   288         conv2d_11[0][0]                  
__________________________________________________________________________________________________
activation_21 (Activation)      (None, 25, 25, 48)   0           batch_normalization_6[0][0]      
__________________________________________________________________________________________________
activation_24 (Activation)      (None, 25, 25, 96)   0           batch_normalization_9[0][0]      
__________________________________________________________________________________________________
average_pooling2d (AveragePooli (None, 25, 25, 192)  0           max_pooling2d_3[0][0]            
__________________________________________________________________________________________________
conv2d_7 (Conv2D)               (None, 25, 25, 64)   12288       max_pooling2d_3[0][0]            
__________________________________________________________________________________________________
conv2d_9 (Conv2D)               (None, 25, 25, 64)   76800       activation_21[0][0]              
__________________________________________________________________________________________________
conv2d_12 (Conv2D)              (None, 25, 25, 96)   82944       activation_24[0][0]              
__________________________________________________________________________________________________
conv2d_13 (Conv2D)              (None, 25, 25, 32)   6144        average_pooling2d[0][0]          
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 25, 25, 64)   192         conv2d_7[0][0]                   
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 25, 25, 64)   192         conv2d_9[0][0]                   
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, 25, 25, 96)   288         conv2d_12[0][0]                  
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, 25, 25, 32)   96          conv2d_13[0][0]                  
__________________________________________________________________________________________________
activation_20 (Activation)      (None, 25, 25, 64)   0           batch_normalization_5[0][0]      
__________________________________________________________________________________________________
activation_22 (Activation)      (None, 25, 25, 64)   0           batch_normalization_7[0][0]      
__________________________________________________________________________________________________
activation_25 (Activation)      (None, 25, 25, 96)   0           batch_normalization_10[0][0]     
__________________________________________________________________________________________________
activation_26 (Activation)      (None, 25, 25, 32)   0           batch_normalization_11[0][0]     
__________________________________________________________________________________________________
mixed0 (Concatenate)            (None, 25, 25, 256)  0           activation_20[0][0]              
                                                                 activation_22[0][0]              
                                                                 activation_25[0][0]              
                                                                 activation_26[0][0]              
__________________________________________________________________________________________________
conv2d_17 (Conv2D)              (None, 25, 25, 64)   16384       mixed0[0][0]                     
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 25, 25, 64)   192         conv2d_17[0][0]                  
__________________________________________________________________________________________________
activation_30 (Activation)      (None, 25, 25, 64)   0           batch_normalization_15[0][0]     
__________________________________________________________________________________________________
conv2d_15 (Conv2D)              (None, 25, 25, 48)   12288       mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_18 (Conv2D)              (None, 25, 25, 96)   55296       activation_30[0][0]              
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 25, 25, 48)   144         conv2d_15[0][0]                  
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 25, 25, 96)   288         conv2d_18[0][0]                  
__________________________________________________________________________________________________
activation_28 (Activation)      (None, 25, 25, 48)   0           batch_normalization_13[0][0]     
__________________________________________________________________________________________________
activation_31 (Activation)      (None, 25, 25, 96)   0           batch_normalization_16[0][0]     
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 25, 25, 256)  0           mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_14 (Conv2D)              (None, 25, 25, 64)   16384       mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_16 (Conv2D)              (None, 25, 25, 64)   76800       activation_28[0][0]              
__________________________________________________________________________________________________
conv2d_19 (Conv2D)              (None, 25, 25, 96)   82944       activation_31[0][0]              
__________________________________________________________________________________________________
conv2d_20 (Conv2D)              (None, 25, 25, 64)   16384       average_pooling2d_1[0][0]        
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 25, 25, 64)   192         conv2d_14[0][0]                  
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 25, 25, 64)   192         conv2d_16[0][0]                  
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 25, 25, 96)   288         conv2d_19[0][0]                  
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 25, 25, 64)   192         conv2d_20[0][0]                  
__________________________________________________________________________________________________
activation_27 (Activation)      (None, 25, 25, 64)   0           batch_normalization_12[0][0]     
__________________________________________________________________________________________________
activation_29 (Activation)      (None, 25, 25, 64)   0           batch_normalization_14[0][0]     
__________________________________________________________________________________________________
activation_32 (Activation)      (None, 25, 25, 96)   0           batch_normalization_17[0][0]     
__________________________________________________________________________________________________
activation_33 (Activation)      (None, 25, 25, 64)   0           batch_normalization_18[0][0]     
__________________________________________________________________________________________________
mixed1 (Concatenate)            (None, 25, 25, 288)  0           activation_27[0][0]              
                                                                 activation_29[0][0]              
                                                                 activation_32[0][0]              
                                                                 activation_33[0][0]              
__________________________________________________________________________________________________
conv2d_24 (Conv2D)              (None, 25, 25, 64)   18432       mixed1[0][0]                     
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 25, 25, 64)   192         conv2d_24[0][0]                  
__________________________________________________________________________________________________
activation_37 (Activation)      (None, 25, 25, 64)   0           batch_normalization_22[0][0]     
__________________________________________________________________________________________________
conv2d_22 (Conv2D)              (None, 25, 25, 48)   13824       mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_25 (Conv2D)              (None, 25, 25, 96)   55296       activation_37[0][0]              
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 25, 25, 48)   144         conv2d_22[0][0]                  
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, 25, 25, 96)   288         conv2d_25[0][0]                  
__________________________________________________________________________________________________
activation_35 (Activation)      (None, 25, 25, 48)   0           batch_normalization_20[0][0]     
__________________________________________________________________________________________________
activation_38 (Activation)      (None, 25, 25, 96)   0           batch_normalization_23[0][0]     
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, 25, 25, 288)  0           mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_21 (Conv2D)              (None, 25, 25, 64)   18432       mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_23 (Conv2D)              (None, 25, 25, 64)   76800       activation_35[0][0]              
__________________________________________________________________________________________________
conv2d_26 (Conv2D)              (None, 25, 25, 96)   82944       activation_38[0][0]              
__________________________________________________________________________________________________
conv2d_27 (Conv2D)              (None, 25, 25, 64)   18432       average_pooling2d_2[0][0]        
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 25, 25, 64)   192         conv2d_21[0][0]                  
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 25, 25, 64)   192         conv2d_23[0][0]                  
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, 25, 25, 96)   288         conv2d_26[0][0]                  
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, 25, 25, 64)   192         conv2d_27[0][0]                  
__________________________________________________________________________________________________
activation_34 (Activation)      (None, 25, 25, 64)   0           batch_normalization_19[0][0]     
__________________________________________________________________________________________________
activation_36 (Activation)      (None, 25, 25, 64)   0           batch_normalization_21[0][0]     
__________________________________________________________________________________________________
activation_39 (Activation)      (None, 25, 25, 96)   0           batch_normalization_24[0][0]     
__________________________________________________________________________________________________
activation_40 (Activation)      (None, 25, 25, 64)   0           batch_normalization_25[0][0]     
__________________________________________________________________________________________________
mixed2 (Concatenate)            (None, 25, 25, 288)  0           activation_34[0][0]              
                                                                 activation_36[0][0]              
                                                                 activation_39[0][0]              
                                                                 activation_40[0][0]              
__________________________________________________________________________________________________
conv2d_29 (Conv2D)              (None, 25, 25, 64)   18432       mixed2[0][0]                     
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, 25, 25, 64)   192         conv2d_29[0][0]                  
__________________________________________________________________________________________________
activation_42 (Activation)      (None, 25, 25, 64)   0           batch_normalization_27[0][0]     
__________________________________________________________________________________________________
conv2d_30 (Conv2D)              (None, 25, 25, 96)   55296       activation_42[0][0]              
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, 25, 25, 96)   288         conv2d_30[0][0]                  
__________________________________________________________________________________________________
activation_43 (Activation)      (None, 25, 25, 96)   0           batch_normalization_28[0][0]     
__________________________________________________________________________________________________
conv2d_28 (Conv2D)              (None, 12, 12, 384)  995328      mixed2[0][0]                     
__________________________________________________________________________________________________
conv2d_31 (Conv2D)              (None, 12, 12, 96)   82944       activation_43[0][0]              
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, 12, 12, 384)  1152        conv2d_28[0][0]                  
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, 12, 12, 96)   288         conv2d_31[0][0]                  
__________________________________________________________________________________________________
activation_41 (Activation)      (None, 12, 12, 384)  0           batch_normalization_26[0][0]     
__________________________________________________________________________________________________
activation_44 (Activation)      (None, 12, 12, 96)   0           batch_normalization_29[0][0]     
__________________________________________________________________________________________________
max_pooling2d_4 (MaxPooling2D)  (None, 12, 12, 288)  0           mixed2[0][0]                     
__________________________________________________________________________________________________
mixed3 (Concatenate)            (None, 12, 12, 768)  0           activation_41[0][0]              
                                                                 activation_44[0][0]              
                                                                 max_pooling2d_4[0][0]            
__________________________________________________________________________________________________
conv2d_36 (Conv2D)              (None, 12, 12, 128)  98304       mixed3[0][0]                     
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, 12, 12, 128)  384         conv2d_36[0][0]                  
__________________________________________________________________________________________________
activation_49 (Activation)      (None, 12, 12, 128)  0           batch_normalization_34[0][0]     
__________________________________________________________________________________________________
conv2d_37 (Conv2D)              (None, 12, 12, 128)  114688      activation_49[0][0]              
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, 12, 12, 128)  384         conv2d_37[0][0]                  
__________________________________________________________________________________________________
activation_50 (Activation)      (None, 12, 12, 128)  0           batch_normalization_35[0][0]     
__________________________________________________________________________________________________
conv2d_33 (Conv2D)              (None, 12, 12, 128)  98304       mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_38 (Conv2D)              (None, 12, 12, 128)  114688      activation_50[0][0]              
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, 12, 12, 128)  384         conv2d_33[0][0]                  
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, 12, 12, 128)  384         conv2d_38[0][0]                  
__________________________________________________________________________________________________
activation_46 (Activation)      (None, 12, 12, 128)  0           batch_normalization_31[0][0]     
__________________________________________________________________________________________________
activation_51 (Activation)      (None, 12, 12, 128)  0           batch_normalization_36[0][0]     
__________________________________________________________________________________________________
conv2d_34 (Conv2D)              (None, 12, 12, 128)  114688      activation_46[0][0]              
__________________________________________________________________________________________________
conv2d_39 (Conv2D)              (None, 12, 12, 128)  114688      activation_51[0][0]              
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, 12, 12, 128)  384         conv2d_34[0][0]                  
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, 12, 12, 128)  384         conv2d_39[0][0]                  
__________________________________________________________________________________________________
activation_47 (Activation)      (None, 12, 12, 128)  0           batch_normalization_32[0][0]     
__________________________________________________________________________________________________
activation_52 (Activation)      (None, 12, 12, 128)  0           batch_normalization_37[0][0]     
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, 12, 12, 768)  0           mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_32 (Conv2D)              (None, 12, 12, 192)  147456      mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_35 (Conv2D)              (None, 12, 12, 192)  172032      activation_47[0][0]              
__________________________________________________________________________________________________
conv2d_40 (Conv2D)              (None, 12, 12, 192)  172032      activation_52[0][0]              
__________________________________________________________________________________________________
conv2d_41 (Conv2D)              (None, 12, 12, 192)  147456      average_pooling2d_3[0][0]        
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, 12, 12, 192)  576         conv2d_32[0][0]                  
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, 12, 12, 192)  576         conv2d_35[0][0]                  
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, 12, 12, 192)  576         conv2d_40[0][0]                  
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, 12, 12, 192)  576         conv2d_41[0][0]                  
__________________________________________________________________________________________________
activation_45 (Activation)      (None, 12, 12, 192)  0           batch_normalization_30[0][0]     
__________________________________________________________________________________________________
activation_48 (Activation)      (None, 12, 12, 192)  0           batch_normalization_33[0][0]     
__________________________________________________________________________________________________
activation_53 (Activation)      (None, 12, 12, 192)  0           batch_normalization_38[0][0]     
__________________________________________________________________________________________________
activation_54 (Activation)      (None, 12, 12, 192)  0           batch_normalization_39[0][0]     
__________________________________________________________________________________________________
mixed4 (Concatenate)            (None, 12, 12, 768)  0           activation_45[0][0]              
                                                                 activation_48[0][0]              
                                                                 activation_53[0][0]              
                                                                 activation_54[0][0]              
__________________________________________________________________________________________________
conv2d_46 (Conv2D)              (None, 12, 12, 160)  122880      mixed4[0][0]                     
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, 12, 12, 160)  480         conv2d_46[0][0]                  
__________________________________________________________________________________________________
activation_59 (Activation)      (None, 12, 12, 160)  0           batch_normalization_44[0][0]     
__________________________________________________________________________________________________
conv2d_47 (Conv2D)              (None, 12, 12, 160)  179200      activation_59[0][0]              
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 12, 12, 160)  480         conv2d_47[0][0]                  
__________________________________________________________________________________________________
activation_60 (Activation)      (None, 12, 12, 160)  0           batch_normalization_45[0][0]     
__________________________________________________________________________________________________
conv2d_43 (Conv2D)              (None, 12, 12, 160)  122880      mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_48 (Conv2D)              (None, 12, 12, 160)  179200      activation_60[0][0]              
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, 12, 12, 160)  480         conv2d_43[0][0]                  
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 12, 12, 160)  480         conv2d_48[0][0]                  
__________________________________________________________________________________________________
activation_56 (Activation)      (None, 12, 12, 160)  0           batch_normalization_41[0][0]     
__________________________________________________________________________________________________
activation_61 (Activation)      (None, 12, 12, 160)  0           batch_normalization_46[0][0]     
__________________________________________________________________________________________________
conv2d_44 (Conv2D)              (None, 12, 12, 160)  179200      activation_56[0][0]              
__________________________________________________________________________________________________
conv2d_49 (Conv2D)              (None, 12, 12, 160)  179200      activation_61[0][0]              
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, 12, 12, 160)  480         conv2d_44[0][0]                  
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 12, 12, 160)  480         conv2d_49[0][0]                  
__________________________________________________________________________________________________
activation_57 (Activation)      (None, 12, 12, 160)  0           batch_normalization_42[0][0]     
__________________________________________________________________________________________________
activation_62 (Activation)      (None, 12, 12, 160)  0           batch_normalization_47[0][0]     
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 12, 12, 768)  0           mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_42 (Conv2D)              (None, 12, 12, 192)  147456      mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_45 (Conv2D)              (None, 12, 12, 192)  215040      activation_57[0][0]              
__________________________________________________________________________________________________
conv2d_50 (Conv2D)              (None, 12, 12, 192)  215040      activation_62[0][0]              
__________________________________________________________________________________________________
conv2d_51 (Conv2D)              (None, 12, 12, 192)  147456      average_pooling2d_4[0][0]        
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, 12, 12, 192)  576         conv2d_42[0][0]                  
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, 12, 12, 192)  576         conv2d_45[0][0]                  
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 12, 12, 192)  576         conv2d_50[0][0]                  
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 12, 12, 192)  576         conv2d_51[0][0]                  
__________________________________________________________________________________________________
activation_55 (Activation)      (None, 12, 12, 192)  0           batch_normalization_40[0][0]     
__________________________________________________________________________________________________
activation_58 (Activation)      (None, 12, 12, 192)  0           batch_normalization_43[0][0]     
__________________________________________________________________________________________________
activation_63 (Activation)      (None, 12, 12, 192)  0           batch_normalization_48[0][0]     
__________________________________________________________________________________________________
activation_64 (Activation)      (None, 12, 12, 192)  0           batch_normalization_49[0][0]     
__________________________________________________________________________________________________
mixed5 (Concatenate)            (None, 12, 12, 768)  0           activation_55[0][0]              
                                                                 activation_58[0][0]              
                                                                 activation_63[0][0]              
                                                                 activation_64[0][0]              
__________________________________________________________________________________________________
conv2d_56 (Conv2D)              (None, 12, 12, 160)  122880      mixed5[0][0]                     
__________________________________________________________________________________________________
batch_normalization_54 (BatchNo (None, 12, 12, 160)  480         conv2d_56[0][0]                  
__________________________________________________________________________________________________
activation_69 (Activation)      (None, 12, 12, 160)  0           batch_normalization_54[0][0]     
__________________________________________________________________________________________________
conv2d_57 (Conv2D)              (None, 12, 12, 160)  179200      activation_69[0][0]              
__________________________________________________________________________________________________
batch_normalization_55 (BatchNo (None, 12, 12, 160)  480         conv2d_57[0][0]                  
__________________________________________________________________________________________________
activation_70 (Activation)      (None, 12, 12, 160)  0           batch_normalization_55[0][0]     
__________________________________________________________________________________________________
conv2d_53 (Conv2D)              (None, 12, 12, 160)  122880      mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_58 (Conv2D)              (None, 12, 12, 160)  179200      activation_70[0][0]              
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 12, 12, 160)  480         conv2d_53[0][0]                  
__________________________________________________________________________________________________
batch_normalization_56 (BatchNo (None, 12, 12, 160)  480         conv2d_58[0][0]                  
__________________________________________________________________________________________________
activation_66 (Activation)      (None, 12, 12, 160)  0           batch_normalization_51[0][0]     
__________________________________________________________________________________________________
activation_71 (Activation)      (None, 12, 12, 160)  0           batch_normalization_56[0][0]     
__________________________________________________________________________________________________
conv2d_54 (Conv2D)              (None, 12, 12, 160)  179200      activation_66[0][0]              
__________________________________________________________________________________________________
conv2d_59 (Conv2D)              (None, 12, 12, 160)  179200      activation_71[0][0]              
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, 12, 12, 160)  480         conv2d_54[0][0]                  
__________________________________________________________________________________________________
batch_normalization_57 (BatchNo (None, 12, 12, 160)  480         conv2d_59[0][0]                  
__________________________________________________________________________________________________
activation_67 (Activation)      (None, 12, 12, 160)  0           batch_normalization_52[0][0]     
__________________________________________________________________________________________________
activation_72 (Activation)      (None, 12, 12, 160)  0           batch_normalization_57[0][0]     
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, 12, 12, 768)  0           mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_52 (Conv2D)              (None, 12, 12, 192)  147456      mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_55 (Conv2D)              (None, 12, 12, 192)  215040      activation_67[0][0]              
__________________________________________________________________________________________________
conv2d_60 (Conv2D)              (None, 12, 12, 192)  215040      activation_72[0][0]              
__________________________________________________________________________________________________
conv2d_61 (Conv2D)              (None, 12, 12, 192)  147456      average_pooling2d_5[0][0]        
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 12, 12, 192)  576         conv2d_52[0][0]                  
__________________________________________________________________________________________________
batch_normalization_53 (BatchNo (None, 12, 12, 192)  576         conv2d_55[0][0]                  
__________________________________________________________________________________________________
batch_normalization_58 (BatchNo (None, 12, 12, 192)  576         conv2d_60[0][0]                  
__________________________________________________________________________________________________
batch_normalization_59 (BatchNo (None, 12, 12, 192)  576         conv2d_61[0][0]                  
__________________________________________________________________________________________________
activation_65 (Activation)      (None, 12, 12, 192)  0           batch_normalization_50[0][0]     
__________________________________________________________________________________________________
activation_68 (Activation)      (None, 12, 12, 192)  0           batch_normalization_53[0][0]     
__________________________________________________________________________________________________
activation_73 (Activation)      (None, 12, 12, 192)  0           batch_normalization_58[0][0]     
__________________________________________________________________________________________________
activation_74 (Activation)      (None, 12, 12, 192)  0           batch_normalization_59[0][0]     
__________________________________________________________________________________________________
mixed6 (Concatenate)            (None, 12, 12, 768)  0           activation_65[0][0]              
                                                                 activation_68[0][0]              
                                                                 activation_73[0][0]              
                                                                 activation_74[0][0]              
__________________________________________________________________________________________________
conv2d_66 (Conv2D)              (None, 12, 12, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
batch_normalization_64 (BatchNo (None, 12, 12, 192)  576         conv2d_66[0][0]                  
__________________________________________________________________________________________________
activation_79 (Activation)      (None, 12, 12, 192)  0           batch_normalization_64[0][0]     
__________________________________________________________________________________________________
conv2d_67 (Conv2D)              (None, 12, 12, 192)  258048      activation_79[0][0]              
__________________________________________________________________________________________________
batch_normalization_65 (BatchNo (None, 12, 12, 192)  576         conv2d_67[0][0]                  
__________________________________________________________________________________________________
activation_80 (Activation)      (None, 12, 12, 192)  0           batch_normalization_65[0][0]     
__________________________________________________________________________________________________
conv2d_63 (Conv2D)              (None, 12, 12, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_68 (Conv2D)              (None, 12, 12, 192)  258048      activation_80[0][0]              
__________________________________________________________________________________________________
batch_normalization_61 (BatchNo (None, 12, 12, 192)  576         conv2d_63[0][0]                  
__________________________________________________________________________________________________
batch_normalization_66 (BatchNo (None, 12, 12, 192)  576         conv2d_68[0][0]                  
__________________________________________________________________________________________________
activation_76 (Activation)      (None, 12, 12, 192)  0           batch_normalization_61[0][0]     
__________________________________________________________________________________________________
activation_81 (Activation)      (None, 12, 12, 192)  0           batch_normalization_66[0][0]     
__________________________________________________________________________________________________
conv2d_64 (Conv2D)              (None, 12, 12, 192)  258048      activation_76[0][0]              
__________________________________________________________________________________________________
conv2d_69 (Conv2D)              (None, 12, 12, 192)  258048      activation_81[0][0]              
__________________________________________________________________________________________________
batch_normalization_62 (BatchNo (None, 12, 12, 192)  576         conv2d_64[0][0]                  
__________________________________________________________________________________________________
batch_normalization_67 (BatchNo (None, 12, 12, 192)  576         conv2d_69[0][0]                  
__________________________________________________________________________________________________
activation_77 (Activation)      (None, 12, 12, 192)  0           batch_normalization_62[0][0]     
__________________________________________________________________________________________________
activation_82 (Activation)      (None, 12, 12, 192)  0           batch_normalization_67[0][0]     
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, 12, 12, 768)  0           mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_62 (Conv2D)              (None, 12, 12, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_65 (Conv2D)              (None, 12, 12, 192)  258048      activation_77[0][0]              
__________________________________________________________________________________________________
conv2d_70 (Conv2D)              (None, 12, 12, 192)  258048      activation_82[0][0]              
__________________________________________________________________________________________________
conv2d_71 (Conv2D)              (None, 12, 12, 192)  147456      average_pooling2d_6[0][0]        
__________________________________________________________________________________________________
batch_normalization_60 (BatchNo (None, 12, 12, 192)  576         conv2d_62[0][0]                  
__________________________________________________________________________________________________
batch_normalization_63 (BatchNo (None, 12, 12, 192)  576         conv2d_65[0][0]                  
__________________________________________________________________________________________________
batch_normalization_68 (BatchNo (None, 12, 12, 192)  576         conv2d_70[0][0]                  
__________________________________________________________________________________________________
batch_normalization_69 (BatchNo (None, 12, 12, 192)  576         conv2d_71[0][0]                  
__________________________________________________________________________________________________
activation_75 (Activation)      (None, 12, 12, 192)  0           batch_normalization_60[0][0]     
__________________________________________________________________________________________________
activation_78 (Activation)      (None, 12, 12, 192)  0           batch_normalization_63[0][0]     
__________________________________________________________________________________________________
activation_83 (Activation)      (None, 12, 12, 192)  0           batch_normalization_68[0][0]     
__________________________________________________________________________________________________
activation_84 (Activation)      (None, 12, 12, 192)  0           batch_normalization_69[0][0]     
__________________________________________________________________________________________________
mixed7 (Concatenate)            (None, 12, 12, 768)  0           activation_75[0][0]              
                                                                 activation_78[0][0]              
                                                                 activation_83[0][0]              
                                                                 activation_84[0][0]              
__________________________________________________________________________________________________
conv2d_74 (Conv2D)              (None, 12, 12, 192)  147456      mixed7[0][0]                     
__________________________________________________________________________________________________
batch_normalization_72 (BatchNo (None, 12, 12, 192)  576         conv2d_74[0][0]                  
__________________________________________________________________________________________________
activation_87 (Activation)      (None, 12, 12, 192)  0           batch_normalization_72[0][0]     
__________________________________________________________________________________________________
conv2d_75 (Conv2D)              (None, 12, 12, 192)  258048      activation_87[0][0]              
__________________________________________________________________________________________________
batch_normalization_73 (BatchNo (None, 12, 12, 192)  576         conv2d_75[0][0]                  
__________________________________________________________________________________________________
activation_88 (Activation)      (None, 12, 12, 192)  0           batch_normalization_73[0][0]     
__________________________________________________________________________________________________
conv2d_72 (Conv2D)              (None, 12, 12, 192)  147456      mixed7[0][0]                     
__________________________________________________________________________________________________
conv2d_76 (Conv2D)              (None, 12, 12, 192)  258048      activation_88[0][0]              
__________________________________________________________________________________________________
batch_normalization_70 (BatchNo (None, 12, 12, 192)  576         conv2d_72[0][0]                  
__________________________________________________________________________________________________
batch_normalization_74 (BatchNo (None, 12, 12, 192)  576         conv2d_76[0][0]                  
__________________________________________________________________________________________________
activation_85 (Activation)      (None, 12, 12, 192)  0           batch_normalization_70[0][0]     
__________________________________________________________________________________________________
activation_89 (Activation)      (None, 12, 12, 192)  0           batch_normalization_74[0][0]     
__________________________________________________________________________________________________
conv2d_73 (Conv2D)              (None, 5, 5, 320)    552960      activation_85[0][0]              
__________________________________________________________________________________________________
conv2d_77 (Conv2D)              (None, 5, 5, 192)    331776      activation_89[0][0]              
__________________________________________________________________________________________________
batch_normalization_71 (BatchNo (None, 5, 5, 320)    960         conv2d_73[0][0]                  
__________________________________________________________________________________________________
batch_normalization_75 (BatchNo (None, 5, 5, 192)    576         conv2d_77[0][0]                  
__________________________________________________________________________________________________
activation_86 (Activation)      (None, 5, 5, 320)    0           batch_normalization_71[0][0]     
__________________________________________________________________________________________________
activation_90 (Activation)      (None, 5, 5, 192)    0           batch_normalization_75[0][0]     
__________________________________________________________________________________________________
max_pooling2d_5 (MaxPooling2D)  (None, 5, 5, 768)    0           mixed7[0][0]                     
__________________________________________________________________________________________________
mixed8 (Concatenate)            (None, 5, 5, 1280)   0           activation_86[0][0]              
                                                                 activation_90[0][0]              
                                                                 max_pooling2d_5[0][0]            
__________________________________________________________________________________________________
conv2d_82 (Conv2D)              (None, 5, 5, 448)    573440      mixed8[0][0]                     
__________________________________________________________________________________________________
batch_normalization_80 (BatchNo (None, 5, 5, 448)    1344        conv2d_82[0][0]                  
__________________________________________________________________________________________________
activation_95 (Activation)      (None, 5, 5, 448)    0           batch_normalization_80[0][0]     
__________________________________________________________________________________________________
conv2d_79 (Conv2D)              (None, 5, 5, 384)    491520      mixed8[0][0]                     
__________________________________________________________________________________________________
conv2d_83 (Conv2D)              (None, 5, 5, 384)    1548288     activation_95[0][0]              
__________________________________________________________________________________________________
batch_normalization_77 (BatchNo (None, 5, 5, 384)    1152        conv2d_79[0][0]                  
__________________________________________________________________________________________________
batch_normalization_81 (BatchNo (None, 5, 5, 384)    1152        conv2d_83[0][0]                  
__________________________________________________________________________________________________
activation_92 (Activation)      (None, 5, 5, 384)    0           batch_normalization_77[0][0]     
__________________________________________________________________________________________________
activation_96 (Activation)      (None, 5, 5, 384)    0           batch_normalization_81[0][0]     
__________________________________________________________________________________________________
conv2d_80 (Conv2D)              (None, 5, 5, 384)    442368      activation_92[0][0]              
__________________________________________________________________________________________________
conv2d_81 (Conv2D)              (None, 5, 5, 384)    442368      activation_92[0][0]              
__________________________________________________________________________________________________
conv2d_84 (Conv2D)              (None, 5, 5, 384)    442368      activation_96[0][0]              
__________________________________________________________________________________________________
conv2d_85 (Conv2D)              (None, 5, 5, 384)    442368      activation_96[0][0]              
__________________________________________________________________________________________________
average_pooling2d_7 (AveragePoo (None, 5, 5, 1280)   0           mixed8[0][0]                     
__________________________________________________________________________________________________
conv2d_78 (Conv2D)              (None, 5, 5, 320)    409600      mixed8[0][0]                     
__________________________________________________________________________________________________
batch_normalization_78 (BatchNo (None, 5, 5, 384)    1152        conv2d_80[0][0]                  
__________________________________________________________________________________________________
batch_normalization_79 (BatchNo (None, 5, 5, 384)    1152        conv2d_81[0][0]                  
__________________________________________________________________________________________________
batch_normalization_82 (BatchNo (None, 5, 5, 384)    1152        conv2d_84[0][0]                  
__________________________________________________________________________________________________
batch_normalization_83 (BatchNo (None, 5, 5, 384)    1152        conv2d_85[0][0]                  
__________________________________________________________________________________________________
conv2d_86 (Conv2D)              (None, 5, 5, 192)    245760      average_pooling2d_7[0][0]        
__________________________________________________________________________________________________
batch_normalization_76 (BatchNo (None, 5, 5, 320)    960         conv2d_78[0][0]                  
__________________________________________________________________________________________________
activation_93 (Activation)      (None, 5, 5, 384)    0           batch_normalization_78[0][0]     
__________________________________________________________________________________________________
activation_94 (Activation)      (None, 5, 5, 384)    0           batch_normalization_79[0][0]     
__________________________________________________________________________________________________
activation_97 (Activation)      (None, 5, 5, 384)    0           batch_normalization_82[0][0]     
__________________________________________________________________________________________________
activation_98 (Activation)      (None, 5, 5, 384)    0           batch_normalization_83[0][0]     
__________________________________________________________________________________________________
batch_normalization_84 (BatchNo (None, 5, 5, 192)    576         conv2d_86[0][0]                  
__________________________________________________________________________________________________
activation_91 (Activation)      (None, 5, 5, 320)    0           batch_normalization_76[0][0]     
__________________________________________________________________________________________________
mixed9_0 (Concatenate)          (None, 5, 5, 768)    0           activation_93[0][0]              
                                                                 activation_94[0][0]              
__________________________________________________________________________________________________
concatenate (Concatenate)       (None, 5, 5, 768)    0           activation_97[0][0]              
                                                                 activation_98[0][0]              
__________________________________________________________________________________________________
activation_99 (Activation)      (None, 5, 5, 192)    0           batch_normalization_84[0][0]     
__________________________________________________________________________________________________
mixed9 (Concatenate)            (None, 5, 5, 2048)   0           activation_91[0][0]              
                                                                 mixed9_0[0][0]                   
                                                                 concatenate[0][0]                
                                                                 activation_99[0][0]              
__________________________________________________________________________________________________
conv2d_91 (Conv2D)              (None, 5, 5, 448)    917504      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_89 (BatchNo (None, 5, 5, 448)    1344        conv2d_91[0][0]                  
__________________________________________________________________________________________________
activation_104 (Activation)     (None, 5, 5, 448)    0           batch_normalization_89[0][0]     
__________________________________________________________________________________________________
conv2d_88 (Conv2D)              (None, 5, 5, 384)    786432      mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_92 (Conv2D)              (None, 5, 5, 384)    1548288     activation_104[0][0]             
__________________________________________________________________________________________________
batch_normalization_86 (BatchNo (None, 5, 5, 384)    1152        conv2d_88[0][0]                  
__________________________________________________________________________________________________
batch_normalization_90 (BatchNo (None, 5, 5, 384)    1152        conv2d_92[0][0]                  
__________________________________________________________________________________________________
activation_101 (Activation)     (None, 5, 5, 384)    0           batch_normalization_86[0][0]     
__________________________________________________________________________________________________
activation_105 (Activation)     (None, 5, 5, 384)    0           batch_normalization_90[0][0]     
__________________________________________________________________________________________________
conv2d_89 (Conv2D)              (None, 5, 5, 384)    442368      activation_101[0][0]             
__________________________________________________________________________________________________
conv2d_90 (Conv2D)              (None, 5, 5, 384)    442368      activation_101[0][0]             
__________________________________________________________________________________________________
conv2d_93 (Conv2D)              (None, 5, 5, 384)    442368      activation_105[0][0]             
__________________________________________________________________________________________________
conv2d_94 (Conv2D)              (None, 5, 5, 384)    442368      activation_105[0][0]             
__________________________________________________________________________________________________
average_pooling2d_8 (AveragePoo (None, 5, 5, 2048)   0           mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_87 (Conv2D)              (None, 5, 5, 320)    655360      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_87 (BatchNo (None, 5, 5, 384)    1152        conv2d_89[0][0]                  
__________________________________________________________________________________________________
batch_normalization_88 (BatchNo (None, 5, 5, 384)    1152        conv2d_90[0][0]                  
__________________________________________________________________________________________________
batch_normalization_91 (BatchNo (None, 5, 5, 384)    1152        conv2d_93[0][0]                  
__________________________________________________________________________________________________
batch_normalization_92 (BatchNo (None, 5, 5, 384)    1152        conv2d_94[0][0]                  
__________________________________________________________________________________________________
conv2d_95 (Conv2D)              (None, 5, 5, 192)    393216      average_pooling2d_8[0][0]        
__________________________________________________________________________________________________
batch_normalization_85 (BatchNo (None, 5, 5, 320)    960         conv2d_87[0][0]                  
__________________________________________________________________________________________________
activation_102 (Activation)     (None, 5, 5, 384)    0           batch_normalization_87[0][0]     
__________________________________________________________________________________________________
activation_103 (Activation)     (None, 5, 5, 384)    0           batch_normalization_88[0][0]     
__________________________________________________________________________________________________
activation_106 (Activation)     (None, 5, 5, 384)    0           batch_normalization_91[0][0]     
__________________________________________________________________________________________________
activation_107 (Activation)     (None, 5, 5, 384)    0           batch_normalization_92[0][0]     
__________________________________________________________________________________________________
batch_normalization_93 (BatchNo (None, 5, 5, 192)    576         conv2d_95[0][0]                  
__________________________________________________________________________________________________
activation_100 (Activation)     (None, 5, 5, 320)    0           batch_normalization_85[0][0]     
__________________________________________________________________________________________________
mixed9_1 (Concatenate)          (None, 5, 5, 768)    0           activation_102[0][0]             
                                                                 activation_103[0][0]             
__________________________________________________________________________________________________
concatenate_1 (Concatenate)     (None, 5, 5, 768)    0           activation_106[0][0]             
                                                                 activation_107[0][0]             
__________________________________________________________________________________________________
activation_108 (Activation)     (None, 5, 5, 192)    0           batch_normalization_93[0][0]     
__________________________________________________________________________________________________
mixed10 (Concatenate)           (None, 5, 5, 2048)   0           activation_100[0][0]             
                                                                 mixed9_1[0][0]                   
                                                                 concatenate_1[0][0]              
                                                                 activation_108[0][0]             
__________________________________________________________________________________________________
flatten_1 (Flatten)             (None, 51200)        0           mixed10[0][0]                    
__________________________________________________________________________________________________
dense_17 (Dense)                (None, 17)           870417      flatten_1[0][0]                  
==================================================================================================
Total params: 22,673,201
Trainable params: 870,417
Non-trainable params: 21,802,784
__________________________________________________________________________________________________
In [38]:
inception_model.compile(
  loss='categorical_crossentropy',
  optimizer='adam',
  metrics=['accuracy']
)
In [39]:
from tensorflow.keras.preprocessing.image import ImageDataGenerator

train_datagen = ImageDataGenerator(rescale = 1./255,
                                   shear_range = 0.2,
                                   zoom_range = 0.2,
                                   horizontal_flip = True,
                                  validation_split=0.25)
In [40]:
training_set = train_datagen.flow_from_directory('C:/Users/admin/Desktop/Great Learning/Computer Vision/Project/flowers',
                                                 target_size = (224, 224),
                                                 batch_size = 32,
                                                 class_mode = 'categorical',
                                                subset='training')

validation_set = train_datagen.flow_from_directory('C:/Users/admin/Desktop/Great Learning/Computer Vision/Project/flowers',
                                                 target_size = (224, 224),
                                                 batch_size = 32,
                                                 class_mode = 'categorical',
                                                subset='validation')
Found 1020 images belonging to 17 classes.
Found 340 images belonging to 17 classes.
In [41]:
import time

StartTime = time.time()
r = inception_model.fit_generator(
  training_set,
  epochs=10,
  validation_data = validation_set,
  steps_per_epoch=len(training_set)
)
EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
Epoch 1/10
32/32 [==============================] - 150s 5s/step - loss: 8.2600 - accuracy: 0.4892 - val_loss: 1.8241 - val_accuracy: 0.7853
Epoch 2/10
32/32 [==============================] - 146s 5s/step - loss: 1.3433 - accuracy: 0.8225 - val_loss: 1.1688 - val_accuracy: 0.8618
Epoch 3/10
32/32 [==============================] - 151s 5s/step - loss: 0.7986 - accuracy: 0.8824 - val_loss: 1.4232 - val_accuracy: 0.8206
Epoch 4/10
32/32 [==============================] - 95s 3s/step - loss: 0.6119 - accuracy: 0.9098 - val_loss: 0.9743 - val_accuracy: 0.8647
Epoch 5/10
32/32 [==============================] - 81s 3s/step - loss: 0.3749 - accuracy: 0.9412 - val_loss: 1.7279 - val_accuracy: 0.8294
Epoch 6/10
32/32 [==============================] - 82s 3s/step - loss: 0.2301 - accuracy: 0.9578 - val_loss: 0.7774 - val_accuracy: 0.8824
Epoch 7/10
32/32 [==============================] - 81s 3s/step - loss: 0.2698 - accuracy: 0.9451 - val_loss: 2.0720 - val_accuracy: 0.7971
Epoch 8/10
32/32 [==============================] - 81s 3s/step - loss: 0.3583 - accuracy: 0.9451 - val_loss: 1.3625 - val_accuracy: 0.8559
Epoch 9/10
32/32 [==============================] - 84s 3s/step - loss: 0.5174 - accuracy: 0.9314 - val_loss: 1.5501 - val_accuracy: 0.8500
Epoch 10/10
32/32 [==============================] - 83s 3s/step - loss: 0.2624 - accuracy: 0.9647 - val_loss: 1.1442 - val_accuracy: 0.8824
############### Total Time Taken:  18 Minutes #############
In [42]:
results = inception_model.evaluate(validation_set)
print('Validation accuracy using Inception_V3 is : ', results[1]*100,'%')
11/11 [==============================] - 19s 2s/step - loss: 1.2743 - accuracy: 0.8765
Validation accuracy using Inception_V3 is :  87.64705657958984 %
In [43]:
import matplotlib.pyplot as plt
# plot the loss
plt.plot(r.history['loss'], label='train loss')
plt.plot(r.history['val_loss'], label='val loss')
plt.legend()
plt.show()
plt.savefig('LossVal_loss')

# plot the accuracy
plt.plot(r.history['accuracy'], label='train acc')
plt.plot(r.history['val_accuracy'], label='val acc')
plt.legend()
plt.show()
plt.savefig('AccVal_acc')
<Figure size 432x288 with 0 Axes>
In [44]:
# save it as a pickle file
from tensorflow.keras.models import load_model
inception_model.save('inception_v3_model.pkl')
INFO:tensorflow:Assets written to: inception_v3_model.pkl\assets

Transfer Learning Using Resnet50

In [45]:
# Here we will be using imagenet weights
resnet = ResNet50(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)
In [46]:
x = Flatten()(resnet.output)
prediction = Dense(len(folders), activation='softmax')(x)
# don't train existing weights
for layer in resnet.layers:
    layer.trainable = False
# create a model object
resnet50_model = Model(inputs=resnet.input, outputs=prediction)

resnet50_model.summary()
Model: "functional_3"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_2 (InputLayer)            [(None, 224, 224, 3) 0                                            
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D)       (None, 230, 230, 3)  0           input_2[0][0]                    
__________________________________________________________________________________________________
conv1_conv (Conv2D)             (None, 112, 112, 64) 9472        conv1_pad[0][0]                  
__________________________________________________________________________________________________
conv1_bn (BatchNormalization)   (None, 112, 112, 64) 256         conv1_conv[0][0]                 
__________________________________________________________________________________________________
conv1_relu (Activation)         (None, 112, 112, 64) 0           conv1_bn[0][0]                   
__________________________________________________________________________________________________
pool1_pad (ZeroPadding2D)       (None, 114, 114, 64) 0           conv1_relu[0][0]                 
__________________________________________________________________________________________________
pool1_pool (MaxPooling2D)       (None, 56, 56, 64)   0           pool1_pad[0][0]                  
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D)    (None, 56, 56, 64)   4160        pool1_pool[0][0]                 
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 56, 56, 64)   0           conv2_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D)    (None, 56, 56, 64)   36928       conv2_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_2_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_2_relu (Activation (None, 56, 56, 64)   0           conv2_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_0_conv (Conv2D)    (None, 56, 56, 256)  16640       pool1_pool[0][0]                 
__________________________________________________________________________________________________
conv2_block1_3_conv (Conv2D)    (None, 56, 56, 256)  16640       conv2_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_0_bn (BatchNormali (None, 56, 56, 256)  1024        conv2_block1_0_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_3_bn (BatchNormali (None, 56, 56, 256)  1024        conv2_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_add (Add)          (None, 56, 56, 256)  0           conv2_block1_0_bn[0][0]          
                                                                 conv2_block1_3_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_out (Activation)   (None, 56, 56, 256)  0           conv2_block1_add[0][0]           
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D)    (None, 56, 56, 64)   16448       conv2_block1_out[0][0]           
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 56, 56, 64)   0           conv2_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D)    (None, 56, 56, 64)   36928       conv2_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block2_2_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_2_relu (Activation (None, 56, 56, 64)   0           conv2_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_3_conv (Conv2D)    (None, 56, 56, 256)  16640       conv2_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv2_block2_3_bn (BatchNormali (None, 56, 56, 256)  1024        conv2_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_add (Add)          (None, 56, 56, 256)  0           conv2_block1_out[0][0]           
                                                                 conv2_block2_3_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_out (Activation)   (None, 56, 56, 256)  0           conv2_block2_add[0][0]           
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D)    (None, 56, 56, 64)   16448       conv2_block2_out[0][0]           
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 56, 56, 64)   0           conv2_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D)    (None, 56, 56, 64)   36928       conv2_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block3_2_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_2_relu (Activation (None, 56, 56, 64)   0           conv2_block3_2_bn[0][0]          
__________________________________________________________________________________________________
conv2_block3_3_conv (Conv2D)    (None, 56, 56, 256)  16640       conv2_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv2_block3_3_bn (BatchNormali (None, 56, 56, 256)  1024        conv2_block3_3_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_add (Add)          (None, 56, 56, 256)  0           conv2_block2_out[0][0]           
                                                                 conv2_block3_3_bn[0][0]          
__________________________________________________________________________________________________
conv2_block3_out (Activation)   (None, 56, 56, 256)  0           conv2_block3_add[0][0]           
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D)    (None, 28, 28, 128)  32896       conv2_block3_out[0][0]           
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 28, 28, 128)  0           conv3_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D)    (None, 28, 28, 128)  147584      conv3_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block1_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_2_relu (Activation (None, 28, 28, 128)  0           conv3_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_0_conv (Conv2D)    (None, 28, 28, 512)  131584      conv2_block3_out[0][0]           
__________________________________________________________________________________________________
conv3_block1_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block1_0_bn (BatchNormali (None, 28, 28, 512)  2048        conv3_block1_0_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_3_bn (BatchNormali (None, 28, 28, 512)  2048        conv3_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_add (Add)          (None, 28, 28, 512)  0           conv3_block1_0_bn[0][0]          
                                                                 conv3_block1_3_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_out (Activation)   (None, 28, 28, 512)  0           conv3_block1_add[0][0]           
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D)    (None, 28, 28, 128)  65664       conv3_block1_out[0][0]           
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 28, 28, 128)  0           conv3_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D)    (None, 28, 28, 128)  147584      conv3_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block2_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_2_relu (Activation (None, 28, 28, 128)  0           conv3_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block2_3_bn (BatchNormali (None, 28, 28, 512)  2048        conv3_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_add (Add)          (None, 28, 28, 512)  0           conv3_block1_out[0][0]           
                                                                 conv3_block2_3_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_out (Activation)   (None, 28, 28, 512)  0           conv3_block2_add[0][0]           
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D)    (None, 28, 28, 128)  65664       conv3_block2_out[0][0]           
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 28, 28, 128)  0           conv3_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D)    (None, 28, 28, 128)  147584      conv3_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block3_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_2_relu (Activation (None, 28, 28, 128)  0           conv3_block3_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block3_3_bn (BatchNormali (None, 28, 28, 512)  2048        conv3_block3_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_add (Add)          (None, 28, 28, 512)  0           conv3_block2_out[0][0]           
                                                                 conv3_block3_3_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_out (Activation)   (None, 28, 28, 512)  0           conv3_block3_add[0][0]           
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D)    (None, 28, 28, 128)  65664       conv3_block3_out[0][0]           
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 28, 28, 128)  0           conv3_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D)    (None, 28, 28, 128)  147584      conv3_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block4_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_2_relu (Activation (None, 28, 28, 128)  0           conv3_block4_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block4_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block4_3_bn (BatchNormali (None, 28, 28, 512)  2048        conv3_block4_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_add (Add)          (None, 28, 28, 512)  0           conv3_block3_out[0][0]           
                                                                 conv3_block4_3_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_out (Activation)   (None, 28, 28, 512)  0           conv3_block4_add[0][0]           
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D)    (None, 14, 14, 256)  131328      conv3_block4_out[0][0]           
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, 14, 14, 256)  0           conv4_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D)    (None, 14, 14, 256)  590080      conv4_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block1_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_2_relu (Activation (None, 14, 14, 256)  0           conv4_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_0_conv (Conv2D)    (None, 14, 14, 1024) 525312      conv3_block4_out[0][0]           
__________________________________________________________________________________________________
conv4_block1_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block1_0_bn (BatchNormali (None, 14, 14, 1024) 4096        conv4_block1_0_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_3_bn (BatchNormali (None, 14, 14, 1024) 4096        conv4_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_add (Add)          (None, 14, 14, 1024) 0           conv4_block1_0_bn[0][0]          
                                                                 conv4_block1_3_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_out (Activation)   (None, 14, 14, 1024) 0           conv4_block1_add[0][0]           
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D)    (None, 14, 14, 256)  262400      conv4_block1_out[0][0]           
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, 14, 14, 256)  0           conv4_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D)    (None, 14, 14, 256)  590080      conv4_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block2_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_2_relu (Activation (None, 14, 14, 256)  0           conv4_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block2_3_bn (BatchNormali (None, 14, 14, 1024) 4096        conv4_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_add (Add)          (None, 14, 14, 1024) 0           conv4_block1_out[0][0]           
                                                                 conv4_block2_3_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_out (Activation)   (None, 14, 14, 1024) 0           conv4_block2_add[0][0]           
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D)    (None, 14, 14, 256)  262400      conv4_block2_out[0][0]           
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, 14, 14, 256)  0           conv4_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D)    (None, 14, 14, 256)  590080      conv4_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block3_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_2_relu (Activation (None, 14, 14, 256)  0           conv4_block3_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block3_3_bn (BatchNormali (None, 14, 14, 1024) 4096        conv4_block3_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_add (Add)          (None, 14, 14, 1024) 0           conv4_block2_out[0][0]           
                                                                 conv4_block3_3_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_out (Activation)   (None, 14, 14, 1024) 0           conv4_block3_add[0][0]           
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D)    (None, 14, 14, 256)  262400      conv4_block3_out[0][0]           
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, 14, 14, 256)  0           conv4_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D)    (None, 14, 14, 256)  590080      conv4_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block4_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_2_relu (Activation (None, 14, 14, 256)  0           conv4_block4_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block4_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block4_3_bn (BatchNormali (None, 14, 14, 1024) 4096        conv4_block4_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_add (Add)          (None, 14, 14, 1024) 0           conv4_block3_out[0][0]           
                                                                 conv4_block4_3_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_out (Activation)   (None, 14, 14, 1024) 0           conv4_block4_add[0][0]           
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D)    (None, 14, 14, 256)  262400      conv4_block4_out[0][0]           
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, 14, 14, 256)  0           conv4_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D)    (None, 14, 14, 256)  590080      conv4_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block5_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_2_relu (Activation (None, 14, 14, 256)  0           conv4_block5_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block5_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block5_3_bn (BatchNormali (None, 14, 14, 1024) 4096        conv4_block5_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_add (Add)          (None, 14, 14, 1024) 0           conv4_block4_out[0][0]           
                                                                 conv4_block5_3_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_out (Activation)   (None, 14, 14, 1024) 0           conv4_block5_add[0][0]           
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D)    (None, 14, 14, 256)  262400      conv4_block5_out[0][0]           
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, 14, 14, 256)  0           conv4_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D)    (None, 14, 14, 256)  590080      conv4_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block6_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_2_relu (Activation (None, 14, 14, 256)  0           conv4_block6_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block6_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block6_3_bn (BatchNormali (None, 14, 14, 1024) 4096        conv4_block6_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_add (Add)          (None, 14, 14, 1024) 0           conv4_block5_out[0][0]           
                                                                 conv4_block6_3_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_out (Activation)   (None, 14, 14, 1024) 0           conv4_block6_add[0][0]           
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D)    (None, 7, 7, 512)    524800      conv4_block6_out[0][0]           
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 7, 7, 512)    0           conv5_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D)    (None, 7, 7, 512)    2359808     conv5_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_2_relu (Activation (None, 7, 7, 512)    0           conv5_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_0_conv (Conv2D)    (None, 7, 7, 2048)   2099200     conv4_block6_out[0][0]           
__________________________________________________________________________________________________
conv5_block1_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_0_bn (BatchNormali (None, 7, 7, 2048)   8192        conv5_block1_0_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_3_bn (BatchNormali (None, 7, 7, 2048)   8192        conv5_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_add (Add)          (None, 7, 7, 2048)   0           conv5_block1_0_bn[0][0]          
                                                                 conv5_block1_3_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_out (Activation)   (None, 7, 7, 2048)   0           conv5_block1_add[0][0]           
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D)    (None, 7, 7, 512)    1049088     conv5_block1_out[0][0]           
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 7, 7, 512)    0           conv5_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D)    (None, 7, 7, 512)    2359808     conv5_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_2_relu (Activation (None, 7, 7, 512)    0           conv5_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_3_bn (BatchNormali (None, 7, 7, 2048)   8192        conv5_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_add (Add)          (None, 7, 7, 2048)   0           conv5_block1_out[0][0]           
                                                                 conv5_block2_3_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_out (Activation)   (None, 7, 7, 2048)   0           conv5_block2_add[0][0]           
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D)    (None, 7, 7, 512)    1049088     conv5_block2_out[0][0]           
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 7, 7, 512)    0           conv5_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D)    (None, 7, 7, 512)    2359808     conv5_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_2_relu (Activation (None, 7, 7, 512)    0           conv5_block3_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_3_bn (BatchNormali (None, 7, 7, 2048)   8192        conv5_block3_3_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_add (Add)          (None, 7, 7, 2048)   0           conv5_block2_out[0][0]           
                                                                 conv5_block3_3_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_out (Activation)   (None, 7, 7, 2048)   0           conv5_block3_add[0][0]           
__________________________________________________________________________________________________
flatten_2 (Flatten)             (None, 100352)       0           conv5_block3_out[0][0]           
__________________________________________________________________________________________________
dense_18 (Dense)                (None, 17)           1706001     flatten_2[0][0]                  
==================================================================================================
Total params: 25,293,713
Trainable params: 1,706,001
Non-trainable params: 23,587,712
__________________________________________________________________________________________________
In [47]:
resnet50_model.compile(
  loss='categorical_crossentropy',
  optimizer='adam',
  metrics=['accuracy']
)
In [48]:
import time

StartTime = time.time()
r = resnet50_model.fit_generator(
  training_set,
  epochs=15,
  validation_data = validation_set,
  steps_per_epoch=len(training_set)
)
EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
Epoch 1/15
32/32 [==============================] - 116s 4s/step - loss: 13.9966 - accuracy: 0.0745 - val_loss: 7.8679 - val_accuracy: 0.0824
Epoch 2/15
32/32 [==============================] - 115s 4s/step - loss: 4.1504 - accuracy: 0.1745 - val_loss: 2.6533 - val_accuracy: 0.2147
Epoch 3/15
32/32 [==============================] - 115s 4s/step - loss: 2.8550 - accuracy: 0.2686 - val_loss: 3.1326 - val_accuracy: 0.2353
Epoch 4/15
32/32 [==============================] - 121s 4s/step - loss: 2.3630 - accuracy: 0.3216 - val_loss: 2.4882 - val_accuracy: 0.3324
Epoch 5/15
32/32 [==============================] - 157s 5s/step - loss: 2.2918 - accuracy: 0.3382 - val_loss: 3.1325 - val_accuracy: 0.2471
Epoch 6/15
32/32 [==============================] - 201s 6s/step - loss: 2.3474 - accuracy: 0.3451 - val_loss: 3.0377 - val_accuracy: 0.2265
Epoch 7/15
32/32 [==============================] - 206s 6s/step - loss: 2.4398 - accuracy: 0.3686 - val_loss: 2.4246 - val_accuracy: 0.4000
Epoch 8/15
32/32 [==============================] - 204s 6s/step - loss: 2.1487 - accuracy: 0.4216 - val_loss: 2.6855 - val_accuracy: 0.2706
Epoch 9/15
32/32 [==============================] - 201s 6s/step - loss: 2.4767 - accuracy: 0.3745 - val_loss: 3.6522 - val_accuracy: 0.2706
Epoch 10/15
32/32 [==============================] - 202s 6s/step - loss: 2.6670 - accuracy: 0.3706 - val_loss: 3.2251 - val_accuracy: 0.2882
Epoch 11/15
32/32 [==============================] - 201s 6s/step - loss: 2.7394 - accuracy: 0.3863 - val_loss: 3.1634 - val_accuracy: 0.3000
Epoch 12/15
32/32 [==============================] - 206s 6s/step - loss: 2.4617 - accuracy: 0.4275 - val_loss: 3.1302 - val_accuracy: 0.3118
Epoch 13/15
32/32 [==============================] - 205s 6s/step - loss: 1.9809 - accuracy: 0.4422 - val_loss: 2.5140 - val_accuracy: 0.4206
Epoch 14/15
32/32 [==============================] - 202s 6s/step - loss: 1.8347 - accuracy: 0.4922 - val_loss: 2.3095 - val_accuracy: 0.3735
Epoch 15/15
32/32 [==============================] - 204s 6s/step - loss: 1.9265 - accuracy: 0.4814 - val_loss: 2.6525 - val_accuracy: 0.3853
############### Total Time Taken:  46 Minutes #############
In [49]:
results = resnet50_model.evaluate(validation_set)
print('Validation accuracy using ResNet50 is : ', results[1]*100,'%')
11/11 [==============================] - 47s 4s/step - loss: 2.7124 - accuracy: 0.3676
Validation accuracy using ResNet50 is :  36.764705181121826 %
In [50]:
import matplotlib.pyplot as plt
# plot the loss
plt.plot(r.history['loss'], label='train loss')
plt.plot(r.history['val_loss'], label='val loss')
plt.legend()
plt.show()
plt.savefig('LossVal_loss')

# plot the accuracy
plt.plot(r.history['accuracy'], label='train acc')
plt.plot(r.history['val_accuracy'], label='val acc')
plt.legend()
plt.show()
plt.savefig('AccVal_acc')
<Figure size 432x288 with 0 Axes>
In [51]:
# save it as a pickle file
from tensorflow.keras.models import load_model
inception_model.save('resnet50_model.pkl')
INFO:tensorflow:Assets written to: resnet50_model.pkl\assets

Transfer Learning Using Resnet152V2

In [52]:
# Here we will be using imagenet weights
resnet152V2 = ResNet152V2(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/resnet/resnet152v2_weights_tf_dim_ordering_tf_kernels_notop.h5
234553344/234545216 [==============================] - 29s 0us/step
In [53]:
x = Flatten()(resnet152V2.output)
prediction = Dense(len(folders), activation='softmax')(x)
# don't train existing weights
for layer in resnet152V2.layers:
    layer.trainable = False
# create a model object
resnet152V2_model = Model(inputs=resnet152V2.input, outputs=prediction)

resnet152V2_model.summary()
Model: "functional_5"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_3 (InputLayer)            [(None, 224, 224, 3) 0                                            
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D)       (None, 230, 230, 3)  0           input_3[0][0]                    
__________________________________________________________________________________________________
conv1_conv (Conv2D)             (None, 112, 112, 64) 9472        conv1_pad[0][0]                  
__________________________________________________________________________________________________
pool1_pad (ZeroPadding2D)       (None, 114, 114, 64) 0           conv1_conv[0][0]                 
__________________________________________________________________________________________________
pool1_pool (MaxPooling2D)       (None, 56, 56, 64)   0           pool1_pad[0][0]                  
__________________________________________________________________________________________________
conv2_block1_preact_bn (BatchNo (None, 56, 56, 64)   256         pool1_pool[0][0]                 
__________________________________________________________________________________________________
conv2_block1_preact_relu (Activ (None, 56, 56, 64)   0           conv2_block1_preact_bn[0][0]     
__________________________________________________________________________________________________
conv2_block1_1_conv (Conv2D)    (None, 56, 56, 64)   4096        conv2_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv2_block1_1_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_1_relu (Activation (None, 56, 56, 64)   0           conv2_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_2_pad (ZeroPadding (None, 58, 58, 64)   0           conv2_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_2_conv (Conv2D)    (None, 56, 56, 64)   36864       conv2_block1_2_pad[0][0]         
__________________________________________________________________________________________________
conv2_block1_2_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block1_2_relu (Activation (None, 56, 56, 64)   0           conv2_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv2_block1_0_conv (Conv2D)    (None, 56, 56, 256)  16640       conv2_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv2_block1_3_conv (Conv2D)    (None, 56, 56, 256)  16640       conv2_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv2_block1_out (Add)          (None, 56, 56, 256)  0           conv2_block1_0_conv[0][0]        
                                                                 conv2_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_preact_bn (BatchNo (None, 56, 56, 256)  1024        conv2_block1_out[0][0]           
__________________________________________________________________________________________________
conv2_block2_preact_relu (Activ (None, 56, 56, 256)  0           conv2_block2_preact_bn[0][0]     
__________________________________________________________________________________________________
conv2_block2_1_conv (Conv2D)    (None, 56, 56, 64)   16384       conv2_block2_preact_relu[0][0]   
__________________________________________________________________________________________________
conv2_block2_1_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_1_relu (Activation (None, 56, 56, 64)   0           conv2_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_2_pad (ZeroPadding (None, 58, 58, 64)   0           conv2_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block2_2_conv (Conv2D)    (None, 56, 56, 64)   36864       conv2_block2_2_pad[0][0]         
__________________________________________________________________________________________________
conv2_block2_2_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block2_2_relu (Activation (None, 56, 56, 64)   0           conv2_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv2_block2_3_conv (Conv2D)    (None, 56, 56, 256)  16640       conv2_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv2_block2_out (Add)          (None, 56, 56, 256)  0           conv2_block1_out[0][0]           
                                                                 conv2_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_preact_bn (BatchNo (None, 56, 56, 256)  1024        conv2_block2_out[0][0]           
__________________________________________________________________________________________________
conv2_block3_preact_relu (Activ (None, 56, 56, 256)  0           conv2_block3_preact_bn[0][0]     
__________________________________________________________________________________________________
conv2_block3_1_conv (Conv2D)    (None, 56, 56, 64)   16384       conv2_block3_preact_relu[0][0]   
__________________________________________________________________________________________________
conv2_block3_1_bn (BatchNormali (None, 56, 56, 64)   256         conv2_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_1_relu (Activation (None, 56, 56, 64)   0           conv2_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv2_block3_2_pad (ZeroPadding (None, 58, 58, 64)   0           conv2_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv2_block3_2_conv (Conv2D)    (None, 28, 28, 64)   36864       conv2_block3_2_pad[0][0]         
__________________________________________________________________________________________________
conv2_block3_2_bn (BatchNormali (None, 28, 28, 64)   256         conv2_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv2_block3_2_relu (Activation (None, 28, 28, 64)   0           conv2_block3_2_bn[0][0]          
__________________________________________________________________________________________________
max_pooling2d_6 (MaxPooling2D)  (None, 28, 28, 256)  0           conv2_block2_out[0][0]           
__________________________________________________________________________________________________
conv2_block3_3_conv (Conv2D)    (None, 28, 28, 256)  16640       conv2_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv2_block3_out (Add)          (None, 28, 28, 256)  0           max_pooling2d_6[0][0]            
                                                                 conv2_block3_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_preact_bn (BatchNo (None, 28, 28, 256)  1024        conv2_block3_out[0][0]           
__________________________________________________________________________________________________
conv3_block1_preact_relu (Activ (None, 28, 28, 256)  0           conv3_block1_preact_bn[0][0]     
__________________________________________________________________________________________________
conv3_block1_1_conv (Conv2D)    (None, 28, 28, 128)  32768       conv3_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block1_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_1_relu (Activation (None, 28, 28, 128)  0           conv3_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_2_pad (ZeroPadding (None, 30, 30, 128)  0           conv3_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block1_2_conv (Conv2D)    (None, 28, 28, 128)  147456      conv3_block1_2_pad[0][0]         
__________________________________________________________________________________________________
conv3_block1_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block1_2_relu (Activation (None, 28, 28, 128)  0           conv3_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block1_0_conv (Conv2D)    (None, 28, 28, 512)  131584      conv3_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block1_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block1_out (Add)          (None, 28, 28, 512)  0           conv3_block1_0_conv[0][0]        
                                                                 conv3_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_preact_bn (BatchNo (None, 28, 28, 512)  2048        conv3_block1_out[0][0]           
__________________________________________________________________________________________________
conv3_block2_preact_relu (Activ (None, 28, 28, 512)  0           conv3_block2_preact_bn[0][0]     
__________________________________________________________________________________________________
conv3_block2_1_conv (Conv2D)    (None, 28, 28, 128)  65536       conv3_block2_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block2_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_1_relu (Activation (None, 28, 28, 128)  0           conv3_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_2_pad (ZeroPadding (None, 30, 30, 128)  0           conv3_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block2_2_conv (Conv2D)    (None, 28, 28, 128)  147456      conv3_block2_2_pad[0][0]         
__________________________________________________________________________________________________
conv3_block2_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block2_2_relu (Activation (None, 28, 28, 128)  0           conv3_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block2_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block2_out (Add)          (None, 28, 28, 512)  0           conv3_block1_out[0][0]           
                                                                 conv3_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_preact_bn (BatchNo (None, 28, 28, 512)  2048        conv3_block2_out[0][0]           
__________________________________________________________________________________________________
conv3_block3_preact_relu (Activ (None, 28, 28, 512)  0           conv3_block3_preact_bn[0][0]     
__________________________________________________________________________________________________
conv3_block3_1_conv (Conv2D)    (None, 28, 28, 128)  65536       conv3_block3_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block3_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_1_relu (Activation (None, 28, 28, 128)  0           conv3_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_2_pad (ZeroPadding (None, 30, 30, 128)  0           conv3_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block3_2_conv (Conv2D)    (None, 28, 28, 128)  147456      conv3_block3_2_pad[0][0]         
__________________________________________________________________________________________________
conv3_block3_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block3_2_relu (Activation (None, 28, 28, 128)  0           conv3_block3_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block3_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block3_out (Add)          (None, 28, 28, 512)  0           conv3_block2_out[0][0]           
                                                                 conv3_block3_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_preact_bn (BatchNo (None, 28, 28, 512)  2048        conv3_block3_out[0][0]           
__________________________________________________________________________________________________
conv3_block4_preact_relu (Activ (None, 28, 28, 512)  0           conv3_block4_preact_bn[0][0]     
__________________________________________________________________________________________________
conv3_block4_1_conv (Conv2D)    (None, 28, 28, 128)  65536       conv3_block4_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block4_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_1_relu (Activation (None, 28, 28, 128)  0           conv3_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_2_pad (ZeroPadding (None, 30, 30, 128)  0           conv3_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block4_2_conv (Conv2D)    (None, 28, 28, 128)  147456      conv3_block4_2_pad[0][0]         
__________________________________________________________________________________________________
conv3_block4_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block4_2_relu (Activation (None, 28, 28, 128)  0           conv3_block4_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block4_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block4_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block4_out (Add)          (None, 28, 28, 512)  0           conv3_block3_out[0][0]           
                                                                 conv3_block4_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block5_preact_bn (BatchNo (None, 28, 28, 512)  2048        conv3_block4_out[0][0]           
__________________________________________________________________________________________________
conv3_block5_preact_relu (Activ (None, 28, 28, 512)  0           conv3_block5_preact_bn[0][0]     
__________________________________________________________________________________________________
conv3_block5_1_conv (Conv2D)    (None, 28, 28, 128)  65536       conv3_block5_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block5_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block5_1_relu (Activation (None, 28, 28, 128)  0           conv3_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block5_2_pad (ZeroPadding (None, 30, 30, 128)  0           conv3_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block5_2_conv (Conv2D)    (None, 28, 28, 128)  147456      conv3_block5_2_pad[0][0]         
__________________________________________________________________________________________________
conv3_block5_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block5_2_relu (Activation (None, 28, 28, 128)  0           conv3_block5_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block5_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block5_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block5_out (Add)          (None, 28, 28, 512)  0           conv3_block4_out[0][0]           
                                                                 conv3_block5_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block6_preact_bn (BatchNo (None, 28, 28, 512)  2048        conv3_block5_out[0][0]           
__________________________________________________________________________________________________
conv3_block6_preact_relu (Activ (None, 28, 28, 512)  0           conv3_block6_preact_bn[0][0]     
__________________________________________________________________________________________________
conv3_block6_1_conv (Conv2D)    (None, 28, 28, 128)  65536       conv3_block6_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block6_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block6_1_relu (Activation (None, 28, 28, 128)  0           conv3_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block6_2_pad (ZeroPadding (None, 30, 30, 128)  0           conv3_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block6_2_conv (Conv2D)    (None, 28, 28, 128)  147456      conv3_block6_2_pad[0][0]         
__________________________________________________________________________________________________
conv3_block6_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block6_2_relu (Activation (None, 28, 28, 128)  0           conv3_block6_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block6_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block6_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block6_out (Add)          (None, 28, 28, 512)  0           conv3_block5_out[0][0]           
                                                                 conv3_block6_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block7_preact_bn (BatchNo (None, 28, 28, 512)  2048        conv3_block6_out[0][0]           
__________________________________________________________________________________________________
conv3_block7_preact_relu (Activ (None, 28, 28, 512)  0           conv3_block7_preact_bn[0][0]     
__________________________________________________________________________________________________
conv3_block7_1_conv (Conv2D)    (None, 28, 28, 128)  65536       conv3_block7_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block7_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block7_1_relu (Activation (None, 28, 28, 128)  0           conv3_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block7_2_pad (ZeroPadding (None, 30, 30, 128)  0           conv3_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block7_2_conv (Conv2D)    (None, 28, 28, 128)  147456      conv3_block7_2_pad[0][0]         
__________________________________________________________________________________________________
conv3_block7_2_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block7_2_relu (Activation (None, 28, 28, 128)  0           conv3_block7_2_bn[0][0]          
__________________________________________________________________________________________________
conv3_block7_3_conv (Conv2D)    (None, 28, 28, 512)  66048       conv3_block7_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block7_out (Add)          (None, 28, 28, 512)  0           conv3_block6_out[0][0]           
                                                                 conv3_block7_3_conv[0][0]        
__________________________________________________________________________________________________
conv3_block8_preact_bn (BatchNo (None, 28, 28, 512)  2048        conv3_block7_out[0][0]           
__________________________________________________________________________________________________
conv3_block8_preact_relu (Activ (None, 28, 28, 512)  0           conv3_block8_preact_bn[0][0]     
__________________________________________________________________________________________________
conv3_block8_1_conv (Conv2D)    (None, 28, 28, 128)  65536       conv3_block8_preact_relu[0][0]   
__________________________________________________________________________________________________
conv3_block8_1_bn (BatchNormali (None, 28, 28, 128)  512         conv3_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv3_block8_1_relu (Activation (None, 28, 28, 128)  0           conv3_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv3_block8_2_pad (ZeroPadding (None, 30, 30, 128)  0           conv3_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv3_block8_2_conv (Conv2D)    (None, 14, 14, 128)  147456      conv3_block8_2_pad[0][0]         
__________________________________________________________________________________________________
conv3_block8_2_bn (BatchNormali (None, 14, 14, 128)  512         conv3_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv3_block8_2_relu (Activation (None, 14, 14, 128)  0           conv3_block8_2_bn[0][0]          
__________________________________________________________________________________________________
max_pooling2d_7 (MaxPooling2D)  (None, 14, 14, 512)  0           conv3_block7_out[0][0]           
__________________________________________________________________________________________________
conv3_block8_3_conv (Conv2D)    (None, 14, 14, 512)  66048       conv3_block8_2_relu[0][0]        
__________________________________________________________________________________________________
conv3_block8_out (Add)          (None, 14, 14, 512)  0           max_pooling2d_7[0][0]            
                                                                 conv3_block8_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_preact_bn (BatchNo (None, 14, 14, 512)  2048        conv3_block8_out[0][0]           
__________________________________________________________________________________________________
conv4_block1_preact_relu (Activ (None, 14, 14, 512)  0           conv4_block1_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block1_1_conv (Conv2D)    (None, 14, 14, 256)  131072      conv4_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block1_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_1_relu (Activation (None, 14, 14, 256)  0           conv4_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block1_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block1_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block1_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block1_2_relu (Activation (None, 14, 14, 256)  0           conv4_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block1_0_conv (Conv2D)    (None, 14, 14, 1024) 525312      conv4_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block1_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block1_out (Add)          (None, 14, 14, 1024) 0           conv4_block1_0_conv[0][0]        
                                                                 conv4_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block1_out[0][0]           
__________________________________________________________________________________________________
conv4_block2_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block2_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block2_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block2_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block2_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_1_relu (Activation (None, 14, 14, 256)  0           conv4_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block2_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block2_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block2_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block2_2_relu (Activation (None, 14, 14, 256)  0           conv4_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block2_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block2_out (Add)          (None, 14, 14, 1024) 0           conv4_block1_out[0][0]           
                                                                 conv4_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block2_out[0][0]           
__________________________________________________________________________________________________
conv4_block3_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block3_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block3_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block3_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block3_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_1_relu (Activation (None, 14, 14, 256)  0           conv4_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block3_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block3_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block3_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block3_2_relu (Activation (None, 14, 14, 256)  0           conv4_block3_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block3_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block3_out (Add)          (None, 14, 14, 1024) 0           conv4_block2_out[0][0]           
                                                                 conv4_block3_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block3_out[0][0]           
__________________________________________________________________________________________________
conv4_block4_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block4_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block4_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block4_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block4_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block4_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_1_relu (Activation (None, 14, 14, 256)  0           conv4_block4_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block4_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block4_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block4_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block4_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block4_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block4_2_relu (Activation (None, 14, 14, 256)  0           conv4_block4_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block4_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block4_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block4_out (Add)          (None, 14, 14, 1024) 0           conv4_block3_out[0][0]           
                                                                 conv4_block4_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block4_out[0][0]           
__________________________________________________________________________________________________
conv4_block5_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block5_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block5_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block5_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block5_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block5_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_1_relu (Activation (None, 14, 14, 256)  0           conv4_block5_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block5_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block5_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block5_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block5_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block5_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block5_2_relu (Activation (None, 14, 14, 256)  0           conv4_block5_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block5_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block5_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block5_out (Add)          (None, 14, 14, 1024) 0           conv4_block4_out[0][0]           
                                                                 conv4_block5_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block5_out[0][0]           
__________________________________________________________________________________________________
conv4_block6_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block6_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block6_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block6_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block6_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block6_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_1_relu (Activation (None, 14, 14, 256)  0           conv4_block6_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block6_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block6_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block6_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block6_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block6_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block6_2_relu (Activation (None, 14, 14, 256)  0           conv4_block6_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block6_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block6_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block6_out (Add)          (None, 14, 14, 1024) 0           conv4_block5_out[0][0]           
                                                                 conv4_block6_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block6_out[0][0]           
__________________________________________________________________________________________________
conv4_block7_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block7_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block7_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block7_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block7_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block7_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_1_relu (Activation (None, 14, 14, 256)  0           conv4_block7_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block7_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block7_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block7_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block7_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block7_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block7_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block7_2_relu (Activation (None, 14, 14, 256)  0           conv4_block7_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block7_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block7_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block7_out (Add)          (None, 14, 14, 1024) 0           conv4_block6_out[0][0]           
                                                                 conv4_block7_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block7_out[0][0]           
__________________________________________________________________________________________________
conv4_block8_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block8_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block8_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block8_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block8_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block8_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_1_relu (Activation (None, 14, 14, 256)  0           conv4_block8_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block8_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block8_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block8_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block8_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block8_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block8_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block8_2_relu (Activation (None, 14, 14, 256)  0           conv4_block8_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block8_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block8_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block8_out (Add)          (None, 14, 14, 1024) 0           conv4_block7_out[0][0]           
                                                                 conv4_block8_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_preact_bn (BatchNo (None, 14, 14, 1024) 4096        conv4_block8_out[0][0]           
__________________________________________________________________________________________________
conv4_block9_preact_relu (Activ (None, 14, 14, 1024) 0           conv4_block9_preact_bn[0][0]     
__________________________________________________________________________________________________
conv4_block9_1_conv (Conv2D)    (None, 14, 14, 256)  262144      conv4_block9_preact_relu[0][0]   
__________________________________________________________________________________________________
conv4_block9_1_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block9_1_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_1_relu (Activation (None, 14, 14, 256)  0           conv4_block9_1_bn[0][0]          
__________________________________________________________________________________________________
conv4_block9_2_pad (ZeroPadding (None, 16, 16, 256)  0           conv4_block9_1_relu[0][0]        
__________________________________________________________________________________________________
conv4_block9_2_conv (Conv2D)    (None, 14, 14, 256)  589824      conv4_block9_2_pad[0][0]         
__________________________________________________________________________________________________
conv4_block9_2_bn (BatchNormali (None, 14, 14, 256)  1024        conv4_block9_2_conv[0][0]        
__________________________________________________________________________________________________
conv4_block9_2_relu (Activation (None, 14, 14, 256)  0           conv4_block9_2_bn[0][0]          
__________________________________________________________________________________________________
conv4_block9_3_conv (Conv2D)    (None, 14, 14, 1024) 263168      conv4_block9_2_relu[0][0]        
__________________________________________________________________________________________________
conv4_block9_out (Add)          (None, 14, 14, 1024) 0           conv4_block8_out[0][0]           
                                                                 conv4_block9_3_conv[0][0]        
__________________________________________________________________________________________________
conv4_block10_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block9_out[0][0]           
__________________________________________________________________________________________________
conv4_block10_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block10_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block10_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block10_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block10_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block10_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block10_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block10_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block10_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block10_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block10_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block10_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block10_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block10_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block10_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block10_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block10_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block10_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block10_out (Add)         (None, 14, 14, 1024) 0           conv4_block9_out[0][0]           
                                                                 conv4_block10_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block10_out[0][0]          
__________________________________________________________________________________________________
conv4_block11_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block11_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block11_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block11_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block11_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block11_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block11_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block11_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block11_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block11_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block11_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block11_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block11_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block11_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block11_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block11_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block11_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block11_out (Add)         (None, 14, 14, 1024) 0           conv4_block10_out[0][0]          
                                                                 conv4_block11_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block11_out[0][0]          
__________________________________________________________________________________________________
conv4_block12_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block12_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block12_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block12_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block12_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block12_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block12_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block12_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block12_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block12_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block12_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block12_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block12_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block12_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block12_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block12_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block12_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block12_out (Add)         (None, 14, 14, 1024) 0           conv4_block11_out[0][0]          
                                                                 conv4_block12_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block12_out[0][0]          
__________________________________________________________________________________________________
conv4_block13_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block13_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block13_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block13_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block13_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block13_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block13_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block13_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block13_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block13_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block13_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block13_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block13_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block13_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block13_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block13_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block13_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block13_out (Add)         (None, 14, 14, 1024) 0           conv4_block12_out[0][0]          
                                                                 conv4_block13_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block13_out[0][0]          
__________________________________________________________________________________________________
conv4_block14_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block14_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block14_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block14_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block14_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block14_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block14_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block14_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block14_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block14_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block14_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block14_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block14_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block14_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block14_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block14_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block14_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block14_out (Add)         (None, 14, 14, 1024) 0           conv4_block13_out[0][0]          
                                                                 conv4_block14_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block14_out[0][0]          
__________________________________________________________________________________________________
conv4_block15_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block15_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block15_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block15_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block15_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block15_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block15_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block15_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block15_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block15_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block15_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block15_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block15_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block15_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block15_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block15_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block15_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block15_out (Add)         (None, 14, 14, 1024) 0           conv4_block14_out[0][0]          
                                                                 conv4_block15_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block15_out[0][0]          
__________________________________________________________________________________________________
conv4_block16_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block16_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block16_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block16_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block16_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block16_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block16_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block16_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block16_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block16_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block16_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block16_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block16_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block16_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block16_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block16_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block16_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block16_out (Add)         (None, 14, 14, 1024) 0           conv4_block15_out[0][0]          
                                                                 conv4_block16_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block16_out[0][0]          
__________________________________________________________________________________________________
conv4_block17_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block17_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block17_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block17_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block17_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block17_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block17_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block17_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block17_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block17_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block17_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block17_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block17_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block17_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block17_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block17_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block17_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block17_out (Add)         (None, 14, 14, 1024) 0           conv4_block16_out[0][0]          
                                                                 conv4_block17_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block17_out[0][0]          
__________________________________________________________________________________________________
conv4_block18_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block18_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block18_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block18_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block18_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block18_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block18_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block18_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block18_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block18_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block18_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block18_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block18_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block18_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block18_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block18_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block18_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block18_out (Add)         (None, 14, 14, 1024) 0           conv4_block17_out[0][0]          
                                                                 conv4_block18_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block18_out[0][0]          
__________________________________________________________________________________________________
conv4_block19_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block19_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block19_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block19_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block19_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block19_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block19_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block19_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block19_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block19_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block19_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block19_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block19_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block19_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block19_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block19_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block19_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block19_out (Add)         (None, 14, 14, 1024) 0           conv4_block18_out[0][0]          
                                                                 conv4_block19_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block19_out[0][0]          
__________________________________________________________________________________________________
conv4_block20_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block20_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block20_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block20_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block20_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block20_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block20_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block20_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block20_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block20_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block20_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block20_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block20_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block20_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block20_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block20_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block20_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block20_out (Add)         (None, 14, 14, 1024) 0           conv4_block19_out[0][0]          
                                                                 conv4_block20_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block20_out[0][0]          
__________________________________________________________________________________________________
conv4_block21_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block21_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block21_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block21_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block21_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block21_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block21_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block21_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block21_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block21_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block21_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block21_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block21_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block21_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block21_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block21_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block21_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block21_out (Add)         (None, 14, 14, 1024) 0           conv4_block20_out[0][0]          
                                                                 conv4_block21_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block21_out[0][0]          
__________________________________________________________________________________________________
conv4_block22_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block22_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block22_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block22_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block22_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block22_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block22_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block22_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block22_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block22_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block22_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block22_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block22_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block22_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block22_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block22_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block22_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block22_out (Add)         (None, 14, 14, 1024) 0           conv4_block21_out[0][0]          
                                                                 conv4_block22_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block22_out[0][0]          
__________________________________________________________________________________________________
conv4_block23_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block23_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block23_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block23_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block23_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block23_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block23_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block23_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block23_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block23_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block23_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block23_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block23_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block23_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block23_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block23_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block23_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block23_out (Add)         (None, 14, 14, 1024) 0           conv4_block22_out[0][0]          
                                                                 conv4_block23_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block23_out[0][0]          
__________________________________________________________________________________________________
conv4_block24_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block24_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block24_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block24_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block24_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block24_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block24_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block24_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block24_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block24_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block24_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block24_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block24_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block24_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block24_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block24_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block24_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block24_out (Add)         (None, 14, 14, 1024) 0           conv4_block23_out[0][0]          
                                                                 conv4_block24_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block25_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block24_out[0][0]          
__________________________________________________________________________________________________
conv4_block25_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block25_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block25_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block25_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block25_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block25_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block25_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block25_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block25_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block25_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block25_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block25_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block25_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block25_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block25_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block25_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block25_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block25_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block25_out (Add)         (None, 14, 14, 1024) 0           conv4_block24_out[0][0]          
                                                                 conv4_block25_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block26_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block25_out[0][0]          
__________________________________________________________________________________________________
conv4_block26_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block26_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block26_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block26_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block26_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block26_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block26_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block26_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block26_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block26_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block26_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block26_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block26_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block26_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block26_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block26_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block26_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block26_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block26_out (Add)         (None, 14, 14, 1024) 0           conv4_block25_out[0][0]          
                                                                 conv4_block26_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block27_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block26_out[0][0]          
__________________________________________________________________________________________________
conv4_block27_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block27_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block27_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block27_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block27_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block27_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block27_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block27_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block27_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block27_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block27_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block27_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block27_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block27_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block27_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block27_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block27_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block27_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block27_out (Add)         (None, 14, 14, 1024) 0           conv4_block26_out[0][0]          
                                                                 conv4_block27_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block28_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block27_out[0][0]          
__________________________________________________________________________________________________
conv4_block28_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block28_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block28_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block28_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block28_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block28_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block28_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block28_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block28_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block28_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block28_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block28_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block28_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block28_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block28_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block28_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block28_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block28_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block28_out (Add)         (None, 14, 14, 1024) 0           conv4_block27_out[0][0]          
                                                                 conv4_block28_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block29_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block28_out[0][0]          
__________________________________________________________________________________________________
conv4_block29_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block29_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block29_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block29_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block29_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block29_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block29_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block29_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block29_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block29_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block29_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block29_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block29_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block29_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block29_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block29_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block29_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block29_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block29_out (Add)         (None, 14, 14, 1024) 0           conv4_block28_out[0][0]          
                                                                 conv4_block29_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block30_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block29_out[0][0]          
__________________________________________________________________________________________________
conv4_block30_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block30_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block30_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block30_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block30_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block30_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block30_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block30_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block30_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block30_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block30_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block30_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block30_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block30_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block30_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block30_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block30_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block30_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block30_out (Add)         (None, 14, 14, 1024) 0           conv4_block29_out[0][0]          
                                                                 conv4_block30_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block31_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block30_out[0][0]          
__________________________________________________________________________________________________
conv4_block31_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block31_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block31_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block31_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block31_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block31_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block31_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block31_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block31_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block31_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block31_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block31_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block31_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block31_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block31_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block31_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block31_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block31_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block31_out (Add)         (None, 14, 14, 1024) 0           conv4_block30_out[0][0]          
                                                                 conv4_block31_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block32_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block31_out[0][0]          
__________________________________________________________________________________________________
conv4_block32_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block32_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block32_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block32_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block32_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block32_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block32_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block32_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block32_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block32_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block32_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block32_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block32_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block32_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block32_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block32_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block32_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block32_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block32_out (Add)         (None, 14, 14, 1024) 0           conv4_block31_out[0][0]          
                                                                 conv4_block32_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block33_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block32_out[0][0]          
__________________________________________________________________________________________________
conv4_block33_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block33_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block33_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block33_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block33_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block33_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block33_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block33_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block33_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block33_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block33_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block33_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block33_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block33_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block33_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block33_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block33_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block33_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block33_out (Add)         (None, 14, 14, 1024) 0           conv4_block32_out[0][0]          
                                                                 conv4_block33_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block34_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block33_out[0][0]          
__________________________________________________________________________________________________
conv4_block34_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block34_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block34_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block34_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block34_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block34_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block34_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block34_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block34_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block34_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block34_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block34_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block34_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block34_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block34_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block34_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block34_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block34_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block34_out (Add)         (None, 14, 14, 1024) 0           conv4_block33_out[0][0]          
                                                                 conv4_block34_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block35_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block34_out[0][0]          
__________________________________________________________________________________________________
conv4_block35_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block35_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block35_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block35_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block35_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block35_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block35_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block35_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block35_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block35_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block35_2_conv (Conv2D)   (None, 14, 14, 256)  589824      conv4_block35_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block35_2_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block35_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block35_2_relu (Activatio (None, 14, 14, 256)  0           conv4_block35_2_bn[0][0]         
__________________________________________________________________________________________________
conv4_block35_3_conv (Conv2D)   (None, 14, 14, 1024) 263168      conv4_block35_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block35_out (Add)         (None, 14, 14, 1024) 0           conv4_block34_out[0][0]          
                                                                 conv4_block35_3_conv[0][0]       
__________________________________________________________________________________________________
conv4_block36_preact_bn (BatchN (None, 14, 14, 1024) 4096        conv4_block35_out[0][0]          
__________________________________________________________________________________________________
conv4_block36_preact_relu (Acti (None, 14, 14, 1024) 0           conv4_block36_preact_bn[0][0]    
__________________________________________________________________________________________________
conv4_block36_1_conv (Conv2D)   (None, 14, 14, 256)  262144      conv4_block36_preact_relu[0][0]  
__________________________________________________________________________________________________
conv4_block36_1_bn (BatchNormal (None, 14, 14, 256)  1024        conv4_block36_1_conv[0][0]       
__________________________________________________________________________________________________
conv4_block36_1_relu (Activatio (None, 14, 14, 256)  0           conv4_block36_1_bn[0][0]         
__________________________________________________________________________________________________
conv4_block36_2_pad (ZeroPaddin (None, 16, 16, 256)  0           conv4_block36_1_relu[0][0]       
__________________________________________________________________________________________________
conv4_block36_2_conv (Conv2D)   (None, 7, 7, 256)    589824      conv4_block36_2_pad[0][0]        
__________________________________________________________________________________________________
conv4_block36_2_bn (BatchNormal (None, 7, 7, 256)    1024        conv4_block36_2_conv[0][0]       
__________________________________________________________________________________________________
conv4_block36_2_relu (Activatio (None, 7, 7, 256)    0           conv4_block36_2_bn[0][0]         
__________________________________________________________________________________________________
max_pooling2d_8 (MaxPooling2D)  (None, 7, 7, 1024)   0           conv4_block35_out[0][0]          
__________________________________________________________________________________________________
conv4_block36_3_conv (Conv2D)   (None, 7, 7, 1024)   263168      conv4_block36_2_relu[0][0]       
__________________________________________________________________________________________________
conv4_block36_out (Add)         (None, 7, 7, 1024)   0           max_pooling2d_8[0][0]            
                                                                 conv4_block36_3_conv[0][0]       
__________________________________________________________________________________________________
conv5_block1_preact_bn (BatchNo (None, 7, 7, 1024)   4096        conv4_block36_out[0][0]          
__________________________________________________________________________________________________
conv5_block1_preact_relu (Activ (None, 7, 7, 1024)   0           conv5_block1_preact_bn[0][0]     
__________________________________________________________________________________________________
conv5_block1_1_conv (Conv2D)    (None, 7, 7, 512)    524288      conv5_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv5_block1_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block1_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_1_relu (Activation (None, 7, 7, 512)    0           conv5_block1_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_2_pad (ZeroPadding (None, 9, 9, 512)    0           conv5_block1_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_2_conv (Conv2D)    (None, 7, 7, 512)    2359296     conv5_block1_2_pad[0][0]         
__________________________________________________________________________________________________
conv5_block1_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block1_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block1_2_relu (Activation (None, 7, 7, 512)    0           conv5_block1_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block1_0_conv (Conv2D)    (None, 7, 7, 2048)   2099200     conv5_block1_preact_relu[0][0]   
__________________________________________________________________________________________________
conv5_block1_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block1_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block1_out (Add)          (None, 7, 7, 2048)   0           conv5_block1_0_conv[0][0]        
                                                                 conv5_block1_3_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_preact_bn (BatchNo (None, 7, 7, 2048)   8192        conv5_block1_out[0][0]           
__________________________________________________________________________________________________
conv5_block2_preact_relu (Activ (None, 7, 7, 2048)   0           conv5_block2_preact_bn[0][0]     
__________________________________________________________________________________________________
conv5_block2_1_conv (Conv2D)    (None, 7, 7, 512)    1048576     conv5_block2_preact_relu[0][0]   
__________________________________________________________________________________________________
conv5_block2_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block2_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_1_relu (Activation (None, 7, 7, 512)    0           conv5_block2_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_2_pad (ZeroPadding (None, 9, 9, 512)    0           conv5_block2_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_2_conv (Conv2D)    (None, 7, 7, 512)    2359296     conv5_block2_2_pad[0][0]         
__________________________________________________________________________________________________
conv5_block2_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block2_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block2_2_relu (Activation (None, 7, 7, 512)    0           conv5_block2_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block2_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block2_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block2_out (Add)          (None, 7, 7, 2048)   0           conv5_block1_out[0][0]           
                                                                 conv5_block2_3_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_preact_bn (BatchNo (None, 7, 7, 2048)   8192        conv5_block2_out[0][0]           
__________________________________________________________________________________________________
conv5_block3_preact_relu (Activ (None, 7, 7, 2048)   0           conv5_block3_preact_bn[0][0]     
__________________________________________________________________________________________________
conv5_block3_1_conv (Conv2D)    (None, 7, 7, 512)    1048576     conv5_block3_preact_relu[0][0]   
__________________________________________________________________________________________________
conv5_block3_1_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block3_1_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_1_relu (Activation (None, 7, 7, 512)    0           conv5_block3_1_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_2_pad (ZeroPadding (None, 9, 9, 512)    0           conv5_block3_1_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_2_conv (Conv2D)    (None, 7, 7, 512)    2359296     conv5_block3_2_pad[0][0]         
__________________________________________________________________________________________________
conv5_block3_2_bn (BatchNormali (None, 7, 7, 512)    2048        conv5_block3_2_conv[0][0]        
__________________________________________________________________________________________________
conv5_block3_2_relu (Activation (None, 7, 7, 512)    0           conv5_block3_2_bn[0][0]          
__________________________________________________________________________________________________
conv5_block3_3_conv (Conv2D)    (None, 7, 7, 2048)   1050624     conv5_block3_2_relu[0][0]        
__________________________________________________________________________________________________
conv5_block3_out (Add)          (None, 7, 7, 2048)   0           conv5_block2_out[0][0]           
                                                                 conv5_block3_3_conv[0][0]        
__________________________________________________________________________________________________
post_bn (BatchNormalization)    (None, 7, 7, 2048)   8192        conv5_block3_out[0][0]           
__________________________________________________________________________________________________
post_relu (Activation)          (None, 7, 7, 2048)   0           post_bn[0][0]                    
__________________________________________________________________________________________________
flatten_3 (Flatten)             (None, 100352)       0           post_relu[0][0]                  
__________________________________________________________________________________________________
dense_19 (Dense)                (None, 17)           1706001     flatten_3[0][0]                  
==================================================================================================
Total params: 60,037,649
Trainable params: 1,706,001
Non-trainable params: 58,331,648
__________________________________________________________________________________________________
In [54]:
resnet152V2_model.compile(
  loss='categorical_crossentropy',
  optimizer='adam',
  metrics=['accuracy']
)
In [55]:
import time

StartTime = time.time()
r = resnet152V2_model.fit_generator(
  training_set,
  epochs=15,
  validation_data = validation_set,
  steps_per_epoch=len(training_set)
)
EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
Epoch 1/15
32/32 [==============================] - 545s 17s/step - loss: 2.8983 - accuracy: 0.6706 - val_loss: 2.1266 - val_accuracy: 0.8206
Epoch 2/15
32/32 [==============================] - 544s 17s/step - loss: 0.8583 - accuracy: 0.9039 - val_loss: 2.8788 - val_accuracy: 0.8206
Epoch 3/15
32/32 [==============================] - 539s 17s/step - loss: 0.5931 - accuracy: 0.9382 - val_loss: 2.5142 - val_accuracy: 0.8206
Epoch 4/15
32/32 [==============================] - 537s 17s/step - loss: 0.4884 - accuracy: 0.9500 - val_loss: 3.2305 - val_accuracy: 0.8265
Epoch 5/15
32/32 [==============================] - 375s 12s/step - loss: 0.4112 - accuracy: 0.9480 - val_loss: 2.1291 - val_accuracy: 0.8441
Epoch 6/15
32/32 [==============================] - 310s 10s/step - loss: 0.3790 - accuracy: 0.9618 - val_loss: 1.8628 - val_accuracy: 0.8765
Epoch 7/15
32/32 [==============================] - 311s 10s/step - loss: 0.2730 - accuracy: 0.9696 - val_loss: 2.1724 - val_accuracy: 0.8882
Epoch 8/15
32/32 [==============================] - 306s 10s/step - loss: 0.2426 - accuracy: 0.9716 - val_loss: 2.9102 - val_accuracy: 0.8618
Epoch 9/15
32/32 [==============================] - 307s 10s/step - loss: 0.2070 - accuracy: 0.9775 - val_loss: 2.7294 - val_accuracy: 0.8941
Epoch 10/15
32/32 [==============================] - 306s 10s/step - loss: 0.2584 - accuracy: 0.9716 - val_loss: 3.3464 - val_accuracy: 0.8441
Epoch 11/15
32/32 [==============================] - 306s 10s/step - loss: 0.3183 - accuracy: 0.9686 - val_loss: 3.6833 - val_accuracy: 0.8765
Epoch 12/15
32/32 [==============================] - 307s 10s/step - loss: 0.1724 - accuracy: 0.9804 - val_loss: 2.7343 - val_accuracy: 0.8559
Epoch 13/15
32/32 [==============================] - 307s 10s/step - loss: 0.2121 - accuracy: 0.9833 - val_loss: 2.8117 - val_accuracy: 0.8794
Epoch 14/15
32/32 [==============================] - 309s 10s/step - loss: 0.0938 - accuracy: 0.9882 - val_loss: 3.1312 - val_accuracy: 0.8882
Epoch 15/15
32/32 [==============================] - 305s 10s/step - loss: 0.1970 - accuracy: 0.9833 - val_loss: 3.7805 - val_accuracy: 0.8647
############### Total Time Taken:  96 Minutes #############
In [56]:
results = resnet152V2_model.evaluate(validation_set)
print('Validation accuracy using ResNet152V2 is : ', results[1]*100,'%')
11/11 [==============================] - 73s 7s/step - loss: 3.7132 - accuracy: 0.8618
Validation accuracy using ResNet152V2 is :  86.17647290229797 %
In [57]:
import matplotlib.pyplot as plt
# plot the loss
plt.plot(r.history['loss'], label='train loss')
plt.plot(r.history['val_loss'], label='val loss')
plt.legend()
plt.show()

# plot the accuracy
plt.plot(r.history['accuracy'], label='train acc')
plt.plot(r.history['val_accuracy'], label='val acc')
plt.legend()
plt.show()
In [58]:
# save it as a pickle file
from tensorflow.keras.models import load_model
inception_model.save('resnet152V2_model.pkl')
INFO:tensorflow:Assets written to: resnet152V2_model.pkl\assets

Transfer Learning Using VGG16

In [59]:
# Here we will be using imagenet weights
VGG16 = VGG16(input_shape=IMAGE_SIZE + [3], weights='imagenet', include_top=False)
Downloading data from https://storage.googleapis.com/tensorflow/keras-applications/vgg16/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5
58892288/58889256 [==============================] - 13s 0us/step
In [60]:
x = Flatten()(VGG16.output)
prediction = Dense(len(folders), activation='softmax')(x)
# don't train existing weights
for layer in VGG16.layers:
    layer.trainable = False
# create a model object
VGG16_model = Model(inputs=VGG16.input, outputs=prediction)

VGG16_model.summary()
Model: "functional_7"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_4 (InputLayer)         [(None, 224, 224, 3)]     0         
_________________________________________________________________
block1_conv1 (Conv2D)        (None, 224, 224, 64)      1792      
_________________________________________________________________
block1_conv2 (Conv2D)        (None, 224, 224, 64)      36928     
_________________________________________________________________
block1_pool (MaxPooling2D)   (None, 112, 112, 64)      0         
_________________________________________________________________
block2_conv1 (Conv2D)        (None, 112, 112, 128)     73856     
_________________________________________________________________
block2_conv2 (Conv2D)        (None, 112, 112, 128)     147584    
_________________________________________________________________
block2_pool (MaxPooling2D)   (None, 56, 56, 128)       0         
_________________________________________________________________
block3_conv1 (Conv2D)        (None, 56, 56, 256)       295168    
_________________________________________________________________
block3_conv2 (Conv2D)        (None, 56, 56, 256)       590080    
_________________________________________________________________
block3_conv3 (Conv2D)        (None, 56, 56, 256)       590080    
_________________________________________________________________
block3_pool (MaxPooling2D)   (None, 28, 28, 256)       0         
_________________________________________________________________
block4_conv1 (Conv2D)        (None, 28, 28, 512)       1180160   
_________________________________________________________________
block4_conv2 (Conv2D)        (None, 28, 28, 512)       2359808   
_________________________________________________________________
block4_conv3 (Conv2D)        (None, 28, 28, 512)       2359808   
_________________________________________________________________
block4_pool (MaxPooling2D)   (None, 14, 14, 512)       0         
_________________________________________________________________
block5_conv1 (Conv2D)        (None, 14, 14, 512)       2359808   
_________________________________________________________________
block5_conv2 (Conv2D)        (None, 14, 14, 512)       2359808   
_________________________________________________________________
block5_conv3 (Conv2D)        (None, 14, 14, 512)       2359808   
_________________________________________________________________
block5_pool (MaxPooling2D)   (None, 7, 7, 512)         0         
_________________________________________________________________
flatten_4 (Flatten)          (None, 25088)             0         
_________________________________________________________________
dense_20 (Dense)             (None, 17)                426513    
=================================================================
Total params: 15,141,201
Trainable params: 426,513
Non-trainable params: 14,714,688
_________________________________________________________________
In [61]:
VGG16_model.compile(
  loss='categorical_crossentropy',
  optimizer='adam',
  metrics=['accuracy']
)
In [62]:
import time

StartTime = time.time()
r = VGG16_model.fit_generator(
  training_set,
  epochs=15,
  validation_data = validation_set,
  steps_per_epoch=len(training_set)
)
EndTime=time.time()
print("############### Total Time Taken: ", round((EndTime-StartTime)/60), 'Minutes #############')
Epoch 1/15
32/32 [==============================] - 354s 11s/step - loss: 2.0029 - accuracy: 0.4324 - val_loss: 0.9262 - val_accuracy: 0.7029
Epoch 2/15
32/32 [==============================] - 353s 11s/step - loss: 0.5526 - accuracy: 0.8294 - val_loss: 0.5635 - val_accuracy: 0.8294
Epoch 3/15
32/32 [==============================] - 359s 11s/step - loss: 0.3343 - accuracy: 0.9078 - val_loss: 0.6929 - val_accuracy: 0.7647
Epoch 4/15
32/32 [==============================] - 354s 11s/step - loss: 0.2292 - accuracy: 0.9402 - val_loss: 0.5956 - val_accuracy: 0.8294
Epoch 5/15
32/32 [==============================] - 353s 11s/step - loss: 0.1393 - accuracy: 0.9647 - val_loss: 0.7291 - val_accuracy: 0.8059
Epoch 6/15
32/32 [==============================] - 356s 11s/step - loss: 0.1270 - accuracy: 0.9686 - val_loss: 0.5377 - val_accuracy: 0.8559
Epoch 7/15
32/32 [==============================] - 354s 11s/step - loss: 0.0678 - accuracy: 0.9902 - val_loss: 0.5681 - val_accuracy: 0.8324
Epoch 8/15
32/32 [==============================] - 355s 11s/step - loss: 0.0521 - accuracy: 0.9941 - val_loss: 0.4605 - val_accuracy: 0.8588
Epoch 9/15
32/32 [==============================] - 601s 19s/step - loss: 0.0420 - accuracy: 0.9961 - val_loss: 0.5008 - val_accuracy: 0.8618
Epoch 10/15
32/32 [==============================] - 611s 19s/step - loss: 0.0308 - accuracy: 0.9971 - val_loss: 0.5679 - val_accuracy: 0.8559
Epoch 11/15
32/32 [==============================] - 624s 19s/step - loss: 0.0257 - accuracy: 0.9980 - val_loss: 0.4578 - val_accuracy: 0.8765
Epoch 12/15
32/32 [==============================] - 626s 20s/step - loss: 0.0247 - accuracy: 1.0000 - val_loss: 0.5049 - val_accuracy: 0.8500
Epoch 13/15
32/32 [==============================] - 542s 17s/step - loss: 0.0163 - accuracy: 1.0000 - val_loss: 0.4810 - val_accuracy: 0.8471
Epoch 14/15
32/32 [==============================] - 355s 11s/step - loss: 0.0172 - accuracy: 0.9990 - val_loss: 0.4942 - val_accuracy: 0.8500
Epoch 15/15
32/32 [==============================] - 350s 11s/step - loss: 0.0130 - accuracy: 1.0000 - val_loss: 0.4653 - val_accuracy: 0.8676
############### Total Time Taken:  112 Minutes #############
In [63]:
results = VGG16_model.evaluate(validation_set)
print('Validation accuracy using VGG16 is : ', results[1]*100,'%')
11/11 [==============================] - 81s 7s/step - loss: 0.4611 - accuracy: 0.8618
Validation accuracy using VGG16 is :  86.17647290229797 %
In [64]:
import matplotlib.pyplot as plt
# plot the loss
plt.plot(r.history['loss'], label='train loss')
plt.plot(r.history['val_loss'], label='val loss')
plt.legend()
plt.show()

# plot the accuracy
plt.plot(r.history['accuracy'], label='train acc')
plt.plot(r.history['val_accuracy'], label='val acc')
plt.legend()
plt.show()
In [65]:
# save it as a pickle file
from tensorflow.keras.models import load_model
inception_model.save('VGG16_model.pkl')
INFO:tensorflow:Assets written to: VGG16_model.pkl\assets

We can clearly see that transfer learning models perform better than the CNN,DL and ML models. VGG16 has been chosen here are the best performing model.

GUI Implementation For The Best Performing Model

Flower To Be Predicted

predict-flower.jpg

In [68]:
import tkinter as tk
from tkinter import ttk
import pandas as pd
from keras.preprocessing import image

def import_file():
    global test_image
    file_name = file.get()
    print(file_name,' has been successfully Imported!')
    if '.jpg' in file_name:
        test_image = 'C:/Users/admin/Desktop/Great Learning/Computer Vision/Project/' + file_name
#         print(test_image)
        test_image = image.load_img(test_image,target_size=(224,224))
        test_image = image.img_to_array(test_image)
        print('The shape of the test image is: ',test_image.shape)
        test_image = np.expand_dims(test_image,axis=0)
#         print('Image has been imported!')
    var = 'Imported !'
    box = tk.Entry(win,width=10,textvariable=var)
    box.grid(row=0,column=5)
    box.insert(1,var)
    return test_image

def predict_class():
    result = VGG16_model.predict(test_image,verbose=0)
    print('The image belongs to class: ',np.argmax(result))
    box2 = tk.Entry(win,width=10,textvariable=np.argmax(result))
    box2.grid(row=1,column=3)
    var1 = 'Class : ' + str(np.argmax(result))
    box2.insert(1,var1)
       
win = tk.Tk()
win.title('Classifier GUI - Great Learning')

name = tk.Label(win,text='Step 1: Image File     ')
name.grid(row=0,column=0,sticky=tk.W)

file = tk.StringVar()
box = tk.Entry(win,width=40,textvariable=file)
box.grid(row=0,column=1)

space = tk.Label(win,text='     ')
space.grid(row=0,column=2,sticky=tk.W)

button1 = tk.Button(win,text='Import the image file',command=import_file)
button1.grid(row=0,column=3,pady = 10, padx = 10)

space = tk.Label(win,text='     ')
space.grid(row=0,column=4,sticky=tk.W)

# ------------------------------------------------- FLOWER CLASSIFIER PREDICTION ---------------------------------------------

name = tk.Label(win,text='Step 2: Predict Class     ')
name.grid(row=1,column=0,sticky=tk.W)

button2 = tk.Button(win,text='Predict',command=predict_class)
button2.grid(row=1,column=1,pady = 10, padx = 10)

space = tk.Label(win,text='     ')
space.grid(row=1,column=2,sticky=tk.W)

space = tk.Label(win,text='')
space.grid(row=1,column=3,sticky=tk.W)

win.mainloop()
predict-flower.jpg  has been successfully Imported!
The shape of the test image is:  (224, 224, 3)
The image belongs to class:  2

tkinter-output.png

------------------------------------------------------------------------------------------------------------

PART - FIVE

Q) Explain in depth your strategy to maintain and support the AIML image classifier after it in production.

Answer :

i) Scoring can't happen without training. So training setup will be expected to support discovering new features, do ultra-fast training with continuous or full learning as new data arrives - without OVERFITTING.

ii) One should also make sure that the dependicies are taken care of especially when the trained models have been sent out to the data enginnering team. This can be taken care of using docker.

iii) Models are expected to be scored with the best possible accuracies given the tradeoffs of training complexity and feature engineering involved - both real-time and batch.

iv) Sometimes you can do training on all data, but the holy grail is for the models to learn continuously from new data that arrives, real time. Thus, shortening the time to deploy in production with the latest and greatest scoring artifacts - all without loosing the fidelity of the model is important.

v) Batch scoring can be useful for image classifiers.

------------------------------------- THE END -------------------------------------------